Mirror of https://github.com/enso-org/enso.git (synced 2024-12-23 06:01:37 +03:00).

Bump the SBT and Scala versions (#948)

This PR also re-enables the Windows CI.

parent b135615670
commit e9b676834b

.github/CODEOWNERS (vendored, 3 changed lines)

@@ -37,5 +37,8 @@ README.md @iamrecursion @kustosz
/project @iamrecursion @kustosz
/build.sbt @iamrecursion @kustosz @lolczak @4e6

# Parser
/parser @iamrecursion @wdanilo

# Repo Configuration
/.github/settings.yml @iamrecursion

.github/settings.yml (vendored, 12 changed lines)

@@ -207,6 +207,16 @@ branches:
required_status_checks:
# Require branches to be up to date before merging.
strict: true
contexts: ["Test Engine (macOS-latest)", "Test Engine (ubuntu-latest)", "Build Engine", "Test Parser (macOS-latest)", "Test Parser (ubuntu-latest)", " Test Parser (windows-latest) ", "license/cla"]
contexts:
- "Test Engine (macOS-latest)"
- "Test Engine (ubuntu-latest)"
- "Test Engine (windows-latest)"
- "Build Engine"
- "Rust Test Native (macOS-latest)"
- "Rust Test Native (ubuntu-latest)"
- "Rust Test Native (windows-latest)"
- "Rust Test WASM"
- "Rust Lint"
- "license/cla"
enforce_admins: null
restrictions: null

.github/workflows/rust.yml (vendored, 115 changed lines)

@@ -1,4 +1,4 @@
name: Parser CI
name: Rust CI

on:
push:

@@ -8,31 +8,27 @@ on:

env:
wasmpackVersion: 0.8.1
nodeVersion: 12.18.0

jobs:
test:
name: Test Parser
name: Rust Test Native
runs-on: ${{ matrix.os }}
timeout-minutes: 10
timeout-minutes: 30
strategy:
matrix:
os: [macOS-latest, ubuntu-latest, windows-latest]
fail-fast: false
steps:
- name: Checkout Parser Sources
- name: Checkout Library Sources
uses: actions/checkout@v2

# Install Tooling
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2020-06-09
toolchain: nightly-2019-11-04
override: true
- name: Install wasm-pack
uses: actions-rs/cargo@v1
with:
command: install
args: wasm-pack --version ${{ env.wasmpackVersion }}

# Caches
- name: Cache Cargo Registry

@@ -49,7 +45,104 @@ jobs:
restore-keys: ${{ runner.os }}-cargo-build

# Tests
- name: Test Parser
- name: Test Native
uses: actions-rs/cargo@v1
with:
command: test

test-wasm:
name: Rust Test WASM
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
steps:
- name: Checkout Library Sources
uses: actions/checkout@v2

# Install Tooling
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2019-11-04
override: true
- name: Install Node
uses: actions/setup-node@v1
with:
node-version: ${{ env.nodeVersion }}
- name: Install wasm-pack
# We could use cargo install wasm-pack, but that takes 3.5 minutes
# compared to a few seconds.
env:
WASMPACKURL: https://github.com/rustwasm/wasm-pack/releases/download/v${{ env.wasmpackVersion }}
WASMPACKDIR: wasm-pack-v${{ env.wasmpackVersion }}-x86_64-unknown-linux-musl
shell: bash
run: |
curl -L "$WASMPACKURL/$WASMPACKDIR.tar.gz" | tar -xz -C .
mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
rm -r $WASMPACKDIR

# Caches
- name: Cache Cargo Registry
uses: actions/cache@v2
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**Cargo.toml') }}
restore-keys: ${{ runner.os }}-cargo-registry
- name: Cache Cargo Test
uses: actions/cache@v2
with:
path: ./target/rust
key: ${{ runner.os }}-cargo-build-${{ hashFiles('**Cargo.toml') }}
restore-keys: ${{ runner.os }}-cargo-build

# Tests
- name: Test WASM
shell: bash
run: |
cd parser
for dir in `ls`; do
cd $dir;
wasm-pack test --node;
cd ..;
done;
cd ..

lint:
name: Rust Lint
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
steps:
- name: Checkout Library Sources
uses: actions/checkout@v2

# Install Tooling
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2019-11-04
override: true
- name: Install Clippy
run: rustup component add clippy

# Caches
- name: Cache Cargo Registry
uses: actions/cache@v2
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**Cargo.toml') }}
restore-keys: ${{ runner.os }}-cargo-registry
- name: Cache Cargo Test
uses: actions/cache@v2
with:
path: ./target/rust
key: ${{ runner.os }}-cargo-build-${{ hashFiles('**Cargo.toml') }}
restore-keys: ${{ runner.os }}-cargo-build

# Lint
- name: Lint Code
shell: bash
run: |
cargo clippy

.github/workflows/scala.yml (vendored, 4 changed lines)

@@ -11,7 +11,7 @@ env:
graalVersion: 20.1.0
javaVersion: java8
# Please ensure that this is in sync with project/build.properties
sbtVersion: 1.3.10
sbtVersion: 1.3.13
excludedPaths: |
.github/PULL_REQUEST_TEMPLATE.md
.github/CODEOWNERS

@@ -32,7 +32,7 @@ jobs:
timeout-minutes: 30
strategy:
matrix:
os: [macOS-latest, ubuntu-latest]
os: [macOS-latest, ubuntu-latest, windows-latest]
fail-fast: false
steps:
- uses: actions/checkout@v2

build.sbt (39 changed lines)

@@ -13,7 +13,7 @@ import scala.sys.process._
// === Global Configuration ===================================================
// ============================================================================

val scalacVersion = "2.13.2"
val scalacVersion = "2.13.3"
val graalVersion = "20.1.0"
val ensoVersion = "0.0.1"
organization in ThisBuild := "org.enso"

@@ -51,7 +51,6 @@ scalacOptions in ThisBuild ++= Seq(
"-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
"-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
"-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
"-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
"-Xlint:nullary-unit", // Warn when nullary methods return Unit.
"-Xlint:option-implicit", // Option.apply used implicit view.
"-Xlint:package-object-classes", // Class or object defined in package object.

@@ -95,6 +94,7 @@ lazy val buildNativeImage =
lazy val enso = (project in file("."))
.settings(version := "0.1")
.aggregate(
`core-definition`,
`interpreter-dsl`,
`json-rpc-server-test`,
`json-rpc-server`,

@@ -143,8 +143,8 @@ lazy val enso = (project in file("."))
def akkaPkg(name: String) = akkaURL %% s"akka-$name" % akkaVersion
def akkaHTTPPkg(name: String) = akkaURL %% s"akka-$name" % akkaHTTPVersion
val akkaURL = "com.typesafe.akka"
val akkaVersion = "2.6.4"
val akkaHTTPVersion = "10.2.0-M1"
val akkaVersion = "2.6.6"
val akkaHTTPVersion = "10.2.0-RC1"
val akkaMockSchedulerVersion = "0.5.5"
val akkaActor = akkaPkg("actor")
val akkaStream = akkaPkg("stream")

@@ -159,7 +159,7 @@ val akka =

// === Cats ===================================================================

val catsVersion = "2.2.0-M1"
val catsVersion = "2.2.0-M3"
val kittensVersion = "2.1.0"
val cats = {
Seq(

@@ -173,9 +173,10 @@ val cats = {

// === Circe ==================================================================

val circeVersion = "0.13.0"
val circeYamlVersion = "0.12.0"
val enumeratumCirceVersion = "1.5.23"
val circeVersion = "0.14.0-M1"
val circeYamlVersion = "0.13.1"
val enumeratumCirceVersion = "1.6.1"
val circeGenericExtrasVersion = "0.13.0"
val circe = Seq("circe-core", "circe-generic", "circe-parser")
.map("io.circe" %% _ % circeVersion)

@@ -183,7 +184,7 @@ val circe = Seq("circe-core", "circe-generic", "circe-parser")

val commonsCollectionsVersion = "4.4"
val commonsLangVersion = "3.10"
val commonsIoVersion = "2.6"
val commonsIoVersion = "2.7"
val commonsTextVersion = "1.8"
val commonsMathVersion = "3.6.1"
val commonsCompressVersion = "1.20"

@@ -199,7 +200,7 @@ val commons = Seq(

// === Jackson ================================================================

val jacksonVersion = "2.10.3"
val jacksonVersion = "2.11.1"
val jackson = Seq(
"com.fasterxml.jackson.dataformat" % "jackson-dataformat-cbor" % jacksonVersion,
"com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion,

@@ -216,7 +217,7 @@ val jmh = Seq(

// === Monocle ================================================================

val monocleVersion = "2.0.4"
val monocleVersion = "2.0.5"
val monocle = {
Seq(
"com.github.julien-truffaut" %% "monocle-core" % monocleVersion,

@@ -234,7 +235,7 @@ val scalaCompiler = Seq(

// === Splain =================================================================

val splainVersion = "0.5.4"
val splainVersion = "0.5.7"
val splainOptions = Seq(
"-P:splain:infix:true",
"-P:splain:foundreq:true",

@@ -255,23 +256,23 @@ val zio = Seq(

val bcpkixJdk15Version = "1.65"
val declineVersion = "1.2.0"
val directoryWatcherVersion = "0.9.9"
val directoryWatcherVersion = "0.9.10"
val flatbuffersVersion = "1.12.0"
val guavaVersion = "29.0-jre"
val jlineVersion = "3.14.1"
val jlineVersion = "3.15.0"
val jupyterJvmBasekernelVersion = "2.3.0"
val kindProjectorVersion = "0.11.0"
val logbackClassicVersion = "1.2.3"
val mockitoScalaVersion = "1.14.0"
val newtypeVersion = "0.4.3"
val mockitoScalaVersion = "1.14.8"
val newtypeVersion = "0.4.4"
val pprintVersion = "0.5.9"
val pureconfigVersion = "0.12.2"
val pureconfigVersion = "0.13.0"
val refinedVersion = "0.9.14"
val scalacheckVersion = "1.14.3"
val scalacticVersion = "3.3.0-SNAP2"
val scalaLoggingVersion = "3.9.2"
val scalameterVersion = "0.19"
val scalatagsVersion = "0.9.0"
val scalatagsVersion = "0.9.1"
val scalatestVersion = "3.3.0-SNAP2"
val shapelessVersion = "2.4.0-M1"
val slickVersion = "3.3.2"

@@ -639,7 +640,7 @@ lazy val `language-server` = (project in file("engine/language-server"))
libraryDependencies ++= akka ++ circe ++ Seq(
"ch.qos.logback" % "logback-classic" % logbackClassicVersion,
"com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion,
"io.circe" %% "circe-generic-extras" % circeVersion,
"io.circe" %% "circe-generic-extras" % circeGenericExtrasVersion,
"io.circe" %% "circe-literal" % circeVersion,
"org.bouncycastle" % "bcpkix-jdk15on" % bcpkixJdk15Version,
"dev.zio" %% "zio" % zioVersion,

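Every dependency version in build.sbt above lives in a top-level val, so a single bump (for example circeVersion to "0.14.0-M1", or the newly introduced circeGenericExtrasVersion) flows into every module that references it. Below is a minimal sketch of that wiring, not the real build definition; the module name exampleProject and the settings around it are assumptions made only for illustration:

```scala
// Minimal build.sbt sketch of the version-val pattern used above.
// `exampleProject` is hypothetical; only the shape of the wiring mirrors
// the real build definition.
val circeVersion              = "0.14.0-M1"
val circeGenericExtrasVersion = "0.13.0"

// One bump of `circeVersion` updates every artifact derived from it.
val circe = Seq("circe-core", "circe-generic", "circe-parser")
  .map("io.circe" %% _ % circeVersion)

lazy val exampleProject = (project in file("example"))
  .settings(
    libraryDependencies ++= circe ++ Seq(
      // circe-generic-extras trails circe proper, so it gets its own val.
      "io.circe" %% "circe-generic-extras" % circeGenericExtrasVersion
    )
  )
```

Keeping each version in one place is what lets a commit like this touch only the val declarations while every libraryDependencies list picks up the new versions automatically.
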
@@ -12,13 +12,19 @@ through community involvement can Enso be the best it can be! There are a whole
host of ways to contribute, and every single one is appreciated. The major
sections of this document are linked below:

<!-- MarkdownTOC levels="2" autolink="true" -->
<!-- MarkdownTOC levels="2,3" autolink="true" -->

- [The Contributor License Agreement](#the-contributor-license-agreement)
- [Issues](#issues)
- [Feature Enhancements](#feature-enhancements)
- [Bug Reports](#bug-reports)
- [Hacking on Enso](#hacking-on-enso)
- [Design Documentation](#design-documentation)
- [System Requirements](#system-requirements)
- [Getting the Sources](#getting-the-sources)
- [Getting Set Up \(Rust\)](#getting-set-up-rust)
- [Building Enso](#building-enso)
- [Running Enso](#running-enso)
- [Pull Requests](#pull-requests)
- [Documentation](#documentation)
- [Issue Triage](#issue-triage)

@@ -116,6 +122,9 @@ In order to build and run Enso you will need the following tools:
JVM.
- [Flatbuffers Compiler](https://google.github.io/flatbuffers) with version
  1.12.0.
- [Cargo](https://doc.rust-lang.org/cargo/getting-started/installation.html),
  the rust build tool.
- [Rustup](https://rustup.rs), the rust toolchain management utility.

Managing multiple JVM installations can be a pain, so some of the team use
[Jenv](http://www.jenv.be/): A useful tool for managing multiple JVMs.

@@ -148,6 +157,20 @@ git clone https://github.com/enso-org/enso.git
git clone git@github.com:enso-org/enso.git
```

### Getting Set Up (Rust)
This project currently requires a specific nightly rust toolchain, as well as a
special set-up step in SBT. To get this project set up, you can run the
following commands:

```bash
rustup toolchain install nightly-2019-11-04
rustup override set nightly-2019-11-04
rustup component add clippy
```

Please note that once the parser is integrated into the SBT build, the
rust-related commands will be automatically performed for you.

### Building Enso
There are multiple projects in this repository, but all can be built, run and
tested using `sbt`. As long as your configuration is correct, with the correct

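The note above says the Rust-related commands will eventually be driven by the SBT build itself. As a purely hypothetical sketch of what such an integration could look like (build.sbt already imports scala.sys.process._), an sbt task might shell out to cargo; the task name buildParser, the parser working directory, and the cargo flags are all assumptions, not the project's actual mechanism:

```scala
// Hypothetical sketch only: an sbt task that runs the Rust build via
// scala.sys.process. Nothing here is the project's real integration.
import scala.sys.process._

lazy val buildParser = taskKey[Unit]("Compile the Rust parser crates.")

buildParser := {
  // Run cargo in the assumed `parser` directory and fail the task on error.
  val exitCode = Process(Seq("cargo", "build", "--release"), file("parser")).!
  if (exitCode != 0) {
    throw new MessageOnlyException("cargo build failed")
  }
}
```
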
@@ -10,7 +10,7 @@ order: 4
This document outlines the security policy for Enso and its libraries.

> **If you believe that you have found a vulnerability in Enso or one of its
> libraries, please see the section on
> libraries, please see the section on
> [reporting a vulnerability](#reporting-a-vulnerability) below.**

<!-- MarkdownTOC levels="2" autolink="true" -->

@@ -55,7 +55,7 @@ case class ZioExec(runtime: Runtime[ZEnv]) extends Exec[ZioExec.IO] {
  * @return a future containing either a failure or a result
  */
override def exec[E, A](op: ZIO[ZEnv, E, A]): Future[Either[E, A]] = {
  val promise = Promise[Either[E, A]]
  val promise = Promise[Either[E, A]]()
  runtime.unsafeRunAsync(op) {
    _.fold(
      ZioExec.completeFailure(promise, _),

@@ -77,7 +77,7 @@ case class ZioExec(runtime: Runtime[ZEnv]) extends Exec[ZioExec.IO] {
  timeout: FiniteDuration,
  op: ZIO[ZEnv, E, A]
): Future[Either[E, A]] = {
  val promise = Promise[Either[E, A]]
  val promise = Promise[Either[E, A]]()
  runtime.unsafeRunAsync(
    op.disconnect.timeout(zio.duration.Duration.fromScala(timeout))
  ) {

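The added parentheses on Promise[Either[E, A]]() above, like the sender(), fail(), and span() call sites further down this diff, all stem from the same Scala 2.13 rule: a method declared with an empty parameter list should be applied with explicit (), and auto-application without them is deprecated under the newer compiler. A minimal, self-contained sketch of the rule (the Connection class is hypothetical):

```scala
import scala.concurrent.Promise

object AutoApplicationSketch {
  final class Connection {
    // An empty-parameter-list ("nullary") method.
    def close(): Unit = println("closed")
  }

  def main(args: Array[String]): Unit = {
    val conn = new Connection
    // conn.close          // deprecated in 2.13: auto-application of `close()`
    conn.close()            // explicit parentheses, as the diff now does

    // Promise.apply takes an empty parameter list, so the same rule applies:
    val promise = Promise[Either[String, Int]]()
    promise.success(Right(1))
  }
}
```
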
@@ -307,7 +307,7 @@ class CollaborativeBuffer(
    Some(CapabilityRegistration(CanEdit(bufferPath)))
  else
    None
sender ! OpenFileResponse(Right(OpenFileResult(buffer, writeCapability)))
sender() ! OpenFileResponse(Right(OpenFileResult(buffer, writeCapability)))
context.become(
  collaborativeEditing(
    buffer,

|
||||
import org.enso.compiler.exception.{CompilationAbortedException, CompilerError}
|
||||
import org.enso.compiler.pass.PassManager
|
||||
import org.enso.compiler.pass.analyse._
|
||||
import org.enso.interpreter.Language
|
||||
import org.enso.interpreter.node.{ExpressionNode => RuntimeExpression}
|
||||
import org.enso.interpreter.runtime.Context
|
||||
import org.enso.interpreter.runtime.error.ModuleDoesNotExistException
|
||||
import org.enso.interpreter.runtime.scope.{
|
||||
LocalScope,
|
||||
ModuleScope,
|
||||
TopLevelScope
|
||||
}
|
||||
import org.enso.interpreter.runtime.scope.{LocalScope, ModuleScope}
|
||||
import org.enso.polyglot.LanguageInfo
|
||||
import org.enso.syntax.text.Parser.IDMap
|
||||
import org.enso.syntax.text.{AST, Parser}
|
||||
|
||||
import scala.annotation.unused
|
||||
|
||||
/** This class encapsulates the static transformation processes that take place
|
||||
* on source code, including parsing, desugaring, type-checking, static
|
||||
* analysis, and optimisation.
|
||||
@ -196,16 +189,17 @@ class Compiler(private val context: Context) {
|
||||
ir: IR.Expression,
|
||||
source: Source,
|
||||
inlineContext: InlineContext
|
||||
): Unit = if (context.isStrictErrors) {
|
||||
val errors = GatherDiagnostics
|
||||
.runExpression(ir, inlineContext)
|
||||
.unsafeGetMetadata(
|
||||
GatherDiagnostics,
|
||||
"No diagnostics metadata right after the gathering pass."
|
||||
)
|
||||
.diagnostics
|
||||
reportDiagnostics(errors, source)
|
||||
}
|
||||
): Unit =
|
||||
if (context.isStrictErrors) {
|
||||
val errors = GatherDiagnostics
|
||||
.runExpression(ir, inlineContext)
|
||||
.unsafeGetMetadata(
|
||||
GatherDiagnostics,
|
||||
"No diagnostics metadata right after the gathering pass."
|
||||
)
|
||||
.diagnostics
|
||||
reportDiagnostics(errors, source)
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs the strict error handling mechanism (if enabled in the language
|
||||
@ -242,7 +236,7 @@ class Compiler(private val context: Context) {
|
||||
diagnostics: List[IR.Diagnostic],
|
||||
source: Source
|
||||
): Unit = {
|
||||
val errors = diagnostics.collect { case e: IR.Error => e }
|
||||
val errors = diagnostics.collect { case e: IR.Error => e }
|
||||
val warnings = diagnostics.collect { case w: IR.Warning => w }
|
||||
|
||||
if (warnings.nonEmpty) {
|
||||
|
@ -1828,7 +1828,7 @@ object Core {
|
||||
* empty list.
|
||||
*/
|
||||
def ofLength(length: Int)(implicit core: Core): Node = {
|
||||
val nil = Node.New.MetaNil
|
||||
val nil = Node.New.MetaNil()
|
||||
|
||||
@tailrec
|
||||
def go(tail: Node, remainingLength: Int): Node = {
|
||||
|
@ -6,7 +6,6 @@ import org.enso.compiler.context.{InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.exception.CompilerError
|
||||
|
||||
import scala.annotation.unused
|
||||
import scala.collection.mutable
|
||||
|
||||
// TODO [AA] In the future, the pass ordering should be _computed_ from the list
|
||||
|
@ -11,7 +11,6 @@ import org.enso.compiler.pass.desugar._
|
||||
import org.enso.compiler.pass.lint.UnusedBindings
|
||||
import org.enso.syntax.text.Debug
|
||||
|
||||
import scala.annotation.unused
|
||||
import scala.collection.mutable
|
||||
import scala.collection.mutable.ListBuffer
|
||||
import scala.reflect.ClassTag
|
||||
|
@ -2,10 +2,13 @@ package org.enso.compiler.pass.desugar
|
||||
|
||||
import org.enso.compiler.context.{InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.core.IR.Error.Redefined
|
||||
import org.enso.compiler.exception.CompilerError
|
||||
import org.enso.compiler.pass.IRPass
|
||||
import org.enso.compiler.pass.analyse.{AliasAnalysis, DataflowAnalysis, TailCall}
|
||||
import org.enso.compiler.pass.analyse.{
|
||||
AliasAnalysis,
|
||||
DataflowAnalysis,
|
||||
TailCall
|
||||
}
|
||||
import org.enso.compiler.pass.lint.UnusedBindings
|
||||
import org.enso.compiler.pass.optimise.LambdaConsolidate
|
||||
|
||||
|
@ -8,7 +8,6 @@ import org.enso.compiler.pass.analyse.{
|
||||
DataflowAnalysis,
|
||||
DemandAnalysis
|
||||
}
|
||||
import org.enso.compiler.pass.optimise.LambdaConsolidate
|
||||
|
||||
/** This pass converts usages of operators to calls to standard functions.
|
||||
*
|
||||
|
@ -5,14 +5,10 @@ import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.core.IR.{Case, Pattern}
|
||||
import org.enso.compiler.exception.CompilerError
|
||||
import org.enso.compiler.pass.IRPass
|
||||
import org.enso.compiler.pass.analyse.{
|
||||
AliasAnalysis,
|
||||
DataflowAnalysis,
|
||||
TailCall
|
||||
}
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis
|
||||
import org.enso.compiler.pass.desugar._
|
||||
import org.enso.compiler.pass.optimise.LambdaConsolidate
|
||||
import org.enso.compiler.pass.resolve.{DocumentationComments, IgnoredBindings}
|
||||
import org.enso.compiler.pass.resolve.IgnoredBindings
|
||||
|
||||
import scala.annotation.unused
|
||||
|
||||
|
@ -3,7 +3,6 @@ package org.enso.compiler.pass.optimise
|
||||
import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.core.IR.DefinitionArgument
|
||||
import org.enso.compiler.core.ir.MetadataStorage
|
||||
import org.enso.compiler.exception.CompilerError
|
||||
import org.enso.compiler.pass.IRPass
|
||||
import org.enso.compiler.pass.analyse.{
|
||||
@ -76,13 +75,14 @@ case object LambdaConsolidate extends IRPass {
|
||||
override def runModule(
|
||||
ir: IR.Module,
|
||||
moduleContext: ModuleContext
|
||||
): IR.Module = ir.transformExpressions {
|
||||
case x =>
|
||||
runExpression(
|
||||
x,
|
||||
new InlineContext(freshNameSupply = moduleContext.freshNameSupply)
|
||||
)
|
||||
}
|
||||
): IR.Module =
|
||||
ir.transformExpressions {
|
||||
case x =>
|
||||
runExpression(
|
||||
x,
|
||||
new InlineContext(freshNameSupply = moduleContext.freshNameSupply)
|
||||
)
|
||||
}
|
||||
|
||||
/** Performs lambda consolidation on an expression.
|
||||
*
|
||||
@ -373,7 +373,7 @@ case object LambdaConsolidate extends IRPass {
|
||||
.map(link => aliasInfo.graph.getOccurrence(link.source))
|
||||
.collect {
|
||||
case Some(
|
||||
AliasAnalysis.Graph.Occurrence.Use(_, _, identifier, _)
|
||||
AliasAnalysis.Graph.Occurrence.Use(_, _, identifier, _)
|
||||
) =>
|
||||
identifier
|
||||
}
|
||||
@ -394,8 +394,8 @@ case object LambdaConsolidate extends IRPass {
|
||||
): List[IR.DefinitionArgument] = {
|
||||
argsWithShadowed.map {
|
||||
case (
|
||||
spec @ IR.DefinitionArgument.Specified(name, _, _, _, _, _),
|
||||
isShadowed
|
||||
spec @ IR.DefinitionArgument.Specified(name, _, _, _, _, _),
|
||||
isShadowed
|
||||
) =>
|
||||
val newName =
|
||||
if (isShadowed) {
|
||||
|
@ -3,7 +3,6 @@ package org.enso.compiler.pass.resolve
|
||||
import org.enso.compiler.context.{InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.IRPass
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis
|
||||
import org.enso.compiler.pass.desugar.{ComplexType, GenerateMethodBodies}
|
||||
|
||||
import scala.annotation.unused
|
||||
|
@ -13,7 +13,7 @@ import org.enso.compiler.pass.lint.UnusedBindings
|
||||
import org.enso.compiler.pass.optimise.LambdaConsolidate
|
||||
import org.enso.compiler.pass.resolve.TypeSignatures.Signature
|
||||
|
||||
import scala.annotation.{nowarn, unused}
|
||||
import scala.annotation.unused
|
||||
|
||||
/** This pass is responsible for analysing type signatures to determine which
|
||||
* arguments in a function definition are suspended.
|
||||
@ -70,9 +70,10 @@ case object SuspendedArguments extends IRPass {
|
||||
override def runModule(
|
||||
ir: IR.Module,
|
||||
@unused moduleContext: ModuleContext
|
||||
): IR.Module = ir.copy(
|
||||
bindings = ir.bindings.map(resolveModuleBinding)
|
||||
)
|
||||
): IR.Module =
|
||||
ir.copy(
|
||||
bindings = ir.bindings.map(resolveModuleBinding)
|
||||
)
|
||||
|
||||
/** Resolves suspended arguments in an arbitrary expression.
|
||||
*
|
||||
@ -187,12 +188,12 @@ case object SuspendedArguments extends IRPass {
|
||||
def toSegments(signature: IR.Expression): List[IR.Expression] = {
|
||||
signature match {
|
||||
case IR.Application.Operator.Binary(
|
||||
l,
|
||||
IR.Name.Literal("->", _, _, _),
|
||||
r,
|
||||
_,
|
||||
_,
|
||||
_
|
||||
l,
|
||||
IR.Name.Literal("->", _, _, _),
|
||||
r,
|
||||
_,
|
||||
_,
|
||||
_
|
||||
) =>
|
||||
l.value :: toSegments(r.value)
|
||||
case IR.Function.Lambda(args, body, _, _, _, _) =>
|
||||
@ -222,15 +223,16 @@ case object SuspendedArguments extends IRPass {
|
||||
*/
|
||||
def markSuspended(
|
||||
pair: (IR.DefinitionArgument, IR.Expression)
|
||||
): IR.DefinitionArgument = pair match {
|
||||
case (arg, typ) =>
|
||||
arg match {
|
||||
case spec: IR.DefinitionArgument.Specified =>
|
||||
if (representsSuspended(typ)) {
|
||||
spec.copy(suspended = true)
|
||||
} else spec.copy(suspended = false)
|
||||
}
|
||||
}
|
||||
): IR.DefinitionArgument =
|
||||
pair match {
|
||||
case (arg, typ) =>
|
||||
arg match {
|
||||
case spec: IR.DefinitionArgument.Specified =>
|
||||
if (representsSuspended(typ)) {
|
||||
spec.copy(suspended = true)
|
||||
} else spec.copy(suspended = false)
|
||||
}
|
||||
}
|
||||
|
||||
/** Computes the suspensions for the arguments list of a function.
|
||||
*
|
||||
|
@ -2,17 +2,9 @@ package org.enso.interpreter.instrument
|
||||
|
||||
import java.nio.ByteBuffer
|
||||
|
||||
import com.oracle.truffle.api.TruffleStackTrace.LazyStackTrace
|
||||
import com.oracle.truffle.api.{TruffleStackTrace, TruffleStackTraceElement}
|
||||
import com.oracle.truffle.api.nodes.RootNode
|
||||
import com.oracle.truffle.api.TruffleStackTrace
|
||||
import org.enso.interpreter.instrument.ReplDebuggerInstrument.ReplExecutionEventNode
|
||||
import org.enso.polyglot.debugger.{
|
||||
Debugger,
|
||||
EvaluationRequest,
|
||||
ListBindingsRequest,
|
||||
Request,
|
||||
SessionExitRequest
|
||||
}
|
||||
import org.enso.polyglot.debugger._
|
||||
import org.graalvm.polyglot.io.MessageEndpoint
|
||||
|
||||
import scala.jdk.CollectionConverters._
|
||||
|
@ -12,9 +12,6 @@ import org.enso.interpreter.service.ExecutionService
|
||||
import org.enso.polyglot.runtime.Runtime.Api
|
||||
import org.graalvm.polyglot.io.MessageEndpoint
|
||||
|
||||
import scala.concurrent.Await
|
||||
import scala.concurrent.duration._
|
||||
|
||||
/**
|
||||
* A message endpoint implementation used by the
|
||||
* [[org.enso.interpreter.instrument.RuntimeServerInstrument]].
|
||||
|
@ -1,13 +1,13 @@
|
||||
package org.enso.interpreter.instrument.execution
|
||||
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.{Callable, Executors}
|
||||
import java.util.concurrent.atomic.AtomicReference
|
||||
import java.util.concurrent.{Callable, Executors}
|
||||
import java.util.logging.Level
|
||||
|
||||
import org.enso.interpreter.instrument.InterpreterContext
|
||||
import org.enso.interpreter.instrument.job.Job
|
||||
import org.enso.polyglot.{RuntimeOptions, RuntimeServerInfo}
|
||||
import org.enso.polyglot.RuntimeServerInfo
|
||||
|
||||
import scala.concurrent.{Future, Promise}
|
||||
import scala.util.control.NonFatal
|
||||
@ -52,7 +52,7 @@ class JobExecutionEngine(
|
||||
locking = locking
|
||||
)
|
||||
|
||||
/** @inheritdoc **/
|
||||
/** @inheritdoc * */
|
||||
override def run[A](job: Job[A]): Future[A] = {
|
||||
val jobId = UUID.randomUUID()
|
||||
val promise = Promise[A]()
|
||||
@ -78,7 +78,7 @@ class JobExecutionEngine(
|
||||
promise.future
|
||||
}
|
||||
|
||||
/** @inheritdoc **/
|
||||
/** @inheritdoc * */
|
||||
override def abortAllJobs(): Unit = {
|
||||
val allJobs = runningJobsRef.get()
|
||||
val cancellableJobs = allJobs.filter(_.job.isCancellable)
|
||||
@ -89,7 +89,7 @@ class JobExecutionEngine(
|
||||
.checkInterrupts()
|
||||
}
|
||||
|
||||
/** @inheritdoc **/
|
||||
/** @inheritdoc * */
|
||||
override def abortJobs(contextId: UUID): Unit = {
|
||||
val allJobs = runningJobsRef.get()
|
||||
val contextJobs = allJobs.filter(_.job.contextIds.contains(contextId))
|
||||
@ -102,7 +102,7 @@ class JobExecutionEngine(
|
||||
.checkInterrupts()
|
||||
}
|
||||
|
||||
/** @inheritdoc **/
|
||||
/** @inheritdoc * */
|
||||
override def stop(): Unit = {
|
||||
val allJobs = runningJobsRef.get()
|
||||
allJobs.foreach(_.future.cancel(true))
|
||||
|
@ -1,16 +1,12 @@
|
||||
package org.enso.interpreter.runtime.scope
|
||||
|
||||
import com.oracle.truffle.api.frame.{FrameDescriptor, FrameSlot}
|
||||
import org.enso.compiler.pass.analyse.{
|
||||
AliasAnalysis,
|
||||
CachePreferenceAnalysis,
|
||||
DataflowAnalysis
|
||||
}
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis.Graph
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis.Graph.{
|
||||
Occurrence,
|
||||
Scope => AliasScope
|
||||
}
|
||||
import org.enso.compiler.pass.analyse.{AliasAnalysis, DataflowAnalysis}
|
||||
|
||||
import scala.collection.mutable
|
||||
import scala.jdk.CollectionConverters._
|
||||
@ -127,9 +123,9 @@ class LocalScope(
|
||||
scope.occurrences.foreach {
|
||||
case x: Occurrence.Def =>
|
||||
parentResult += x.symbol -> new FramePointer(
|
||||
level,
|
||||
frameSlots(x.id)
|
||||
)
|
||||
level,
|
||||
frameSlots(x.id)
|
||||
)
|
||||
case _ =>
|
||||
}
|
||||
parentResult
|
||||
|
@ -3,7 +3,7 @@ package org.enso.compiler.test
|
||||
import org.enso.compiler.codegen.AstToIr
|
||||
import org.enso.compiler.context.{InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.{IRPass, PassManager}
|
||||
import org.enso.compiler.pass.PassManager
|
||||
import org.enso.syntax.text.{AST, Parser}
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
import org.scalatest.wordspec.AnyWordSpecLike
|
||||
@ -110,8 +110,8 @@ trait CompilerRunner {
|
||||
*
|
||||
* @param source the source code to preprocess
|
||||
*/
|
||||
implicit class Preprocess(source: String)(
|
||||
implicit passManager: PassManager
|
||||
implicit class Preprocess(source: String)(implicit
|
||||
passManager: PassManager
|
||||
) {
|
||||
|
||||
/** Translates the source code into appropriate IR for testing this pass.
|
||||
@ -127,8 +127,8 @@ trait CompilerRunner {
|
||||
* @return IR appropriate for testing the alias analysis pass as an
|
||||
* expression
|
||||
*/
|
||||
def preprocessExpression(
|
||||
implicit inlineContext: InlineContext
|
||||
def preprocessExpression(implicit
|
||||
inlineContext: InlineContext
|
||||
): Option[IR.Expression] = {
|
||||
source.toIrExpression.map(_.runPasses(passManager, inlineContext))
|
||||
}
|
||||
|
@ -11,7 +11,6 @@ import org.enso.compiler.pass.optimise.UnreachableMatchBranches
|
||||
import org.enso.compiler.pass.resolve.{
|
||||
DocumentationComments,
|
||||
IgnoredBindings,
|
||||
SuspendedArguments,
|
||||
TypeFunctions,
|
||||
TypeSignatures
|
||||
}
|
||||
|
@ -18,7 +18,7 @@ class FreshNameSupplyTest extends CompilerTest {
|
||||
val newName = fns.newName()
|
||||
|
||||
if (seenNames contains newName) {
|
||||
fail
|
||||
fail()
|
||||
}
|
||||
|
||||
succeed
|
||||
|
@ -7,6 +7,8 @@ import org.scalatest.BeforeAndAfterEach
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
|
||||
import scala.annotation.nowarn
|
||||
|
||||
/** This file tests the primitive, low-level operations on core.
|
||||
*
|
||||
* It does _not_ utilise the high-level API, and instead works directly with
|
||||
@ -15,6 +17,7 @@ import org.scalatest.matchers.should.Matchers
|
||||
* PLEASE NOTE: Many of these tests will be removed once the smart constructors
|
||||
* exist.
|
||||
*/
|
||||
@nowarn("cat=deprecation")
|
||||
class CorePrimTest extends AnyFlatSpec with Matchers with BeforeAndAfterEach {
|
||||
|
||||
// === Test Setup ===========================================================
|
||||
|
@ -42,7 +42,7 @@ class CoreTest extends CompilerTest {
|
||||
val bindingsList = Utility.ListOps.from(binding1)
|
||||
|
||||
val block =
|
||||
Node.New.Block(bindingsList, binding2, dummyLocation).getOrElse(fail)
|
||||
Node.New.Block(bindingsList, binding2, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have multiple parents for the node at the bottom of the diamond" in {
|
||||
fnName.parents.size shouldEqual 2
|
||||
@ -135,10 +135,10 @@ class CoreTest extends CompilerTest {
|
||||
val empty = Node.New.Empty()
|
||||
|
||||
val tempNil = Node.New.MetaNil()
|
||||
val cons4 = Node.New.MetaList(empty, tempNil).getOrElse(fail)
|
||||
val cons3 = Node.New.MetaList(empty, cons4).getOrElse(fail)
|
||||
val cons2 = Node.New.MetaList(empty, cons3).getOrElse(fail)
|
||||
val cons1 = Node.New.MetaList(empty, cons2).getOrElse(fail)
|
||||
val cons4 = Node.New.MetaList(empty, tempNil).getOrElse(fail())
|
||||
val cons3 = Node.New.MetaList(empty, cons4).getOrElse(fail())
|
||||
val cons2 = Node.New.MetaList(empty, cons3).getOrElse(fail())
|
||||
val cons1 = Node.New.MetaList(empty, cons2).getOrElse(fail())
|
||||
|
||||
// Link the nodes in a loop
|
||||
val loopLink = Link.New.Connected(cons4, cons1)
|
||||
@ -200,7 +200,7 @@ class CoreTest extends CompilerTest {
|
||||
|
||||
"be able to contain complex structures" in {
|
||||
val bindingRef =
|
||||
Utility.ListOps.at(list, 0).getOrElse(fail).unsafeAs[NodeShape.Binding]
|
||||
Utility.ListOps.at(list, 0).getOrElse(fail()).unsafeAs[NodeShape.Binding]
|
||||
|
||||
bindingRef shouldEqual myBinding
|
||||
|
||||
@ -224,7 +224,7 @@ class CoreTest extends CompilerTest {
|
||||
.componentRefFromIndex[Links](listEnd.parents.head)
|
||||
.source
|
||||
.as[NodeShape.MetaList]
|
||||
.getOrElse(fail)
|
||||
.getOrElse(fail())
|
||||
cons3ref.head.target.is[NodeShape.Name] shouldEqual true
|
||||
cons3ref.head.target shouldEqual myFnName
|
||||
|
||||
|
@ -62,11 +62,11 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
if (Utility.ListOps.equals(xs, errList)) {
|
||||
succeed
|
||||
} else {
|
||||
fail
|
||||
fail()
|
||||
}
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
case Right(_) => fail
|
||||
case Right(_) => fail()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -88,7 +88,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val emptyNode = Node.New.Empty()
|
||||
val nilNode = Node.New.MetaNil()
|
||||
val listNode =
|
||||
Node.New.MetaList(emptyNode, nilNode).getOrElse(fail)
|
||||
Node.New.MetaList(emptyNode, nilNode).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
listNode.location shouldEqual Node.Constants.invalidLocation
|
||||
@ -225,7 +225,9 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val name = Node.New.Name("MyModule", dummyLocation)
|
||||
|
||||
val module =
|
||||
Node.New.ModuleDef(name, importNil, defNil, dummyLocation).getOrElse(fail)
|
||||
Node.New
|
||||
.ModuleDef(name, importNil, defNil, dummyLocation)
|
||||
.getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
module.location shouldEqual dummyLocation
|
||||
@ -263,7 +265,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val segmentsNil = Node.New.MetaNil()
|
||||
val empty = Node.New.Empty()
|
||||
val imp =
|
||||
Node.New.Import(segmentsNil, dummyLocation).getOrElse(fail)
|
||||
Node.New.Import(segmentsNil, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
imp.location shouldEqual dummyLocation
|
||||
@ -296,7 +298,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val topLevelBinding =
|
||||
Node.New
|
||||
.TopLevelBinding(emptyModule, binding, dummyLocation)
|
||||
.getOrElse(fail)
|
||||
.getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
topLevelBinding.location shouldEqual dummyLocation
|
||||
@ -332,7 +334,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val argName = Node.New.Empty()
|
||||
val args = Utility.ListOps.from(argName)
|
||||
|
||||
val atomDef = Node.New.AtomDef(name, args, dummyLocation).getOrElse(fail)
|
||||
val atomDef = Node.New.AtomDef(name, args, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
atomDef.location shouldEqual dummyLocation
|
||||
@ -369,7 +371,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val body = Utility.ListOps.from(bodyExpr)
|
||||
|
||||
val typeDef =
|
||||
Node.New.TypeDef(name, tParams, body, dummyLocation).getOrElse(fail)
|
||||
Node.New.TypeDef(name, tParams, body, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
typeDef.location shouldEqual dummyLocation
|
||||
@ -677,7 +679,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val body = Node.New.Empty()
|
||||
|
||||
val functionDef =
|
||||
Node.New.FunctionDef(name, args, body, dummyLocation).getOrElse(fail)
|
||||
Node.New.FunctionDef(name, args, body, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
functionDef.location shouldEqual dummyLocation
|
||||
@ -717,7 +719,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val methodDef =
|
||||
Node.New
|
||||
.MethodDef(targetPath, name, function, dummyLocation)
|
||||
.getOrElse(fail)
|
||||
.getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
methodDef.location shouldEqual dummyLocation
|
||||
@ -767,7 +769,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
|
||||
val arg = Node.New
|
||||
.DefinitionArgument(name, suspended, default, dummyLocation)
|
||||
.getOrElse(fail)
|
||||
.getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
arg.location shouldEqual dummyLocation
|
||||
@ -1008,7 +1010,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val returnVal = Node.New.Empty()
|
||||
|
||||
val block =
|
||||
Node.New.Block(expressions, returnVal, dummyLocation).getOrElse(fail)
|
||||
Node.New.Block(expressions, returnVal, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
block.location shouldEqual dummyLocation
|
||||
@ -1068,7 +1070,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val branches = Utility.ListOps.from(Node.New.Empty())
|
||||
|
||||
val caseExpr =
|
||||
Node.New.CaseExpr(scrutinee, branches, dummyLocation).getOrElse(fail)
|
||||
Node.New.CaseExpr(scrutinee, branches, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
caseExpr.location shouldEqual dummyLocation
|
||||
@ -1234,7 +1236,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
Node.New.ForeignCodeLiteral("lambda x: x + 1", dummyLocation)
|
||||
|
||||
val foreignDefinition =
|
||||
Node.New.ForeignDefinition(language, code, dummyLocation).getOrElse(fail)
|
||||
Node.New.ForeignDefinition(language, code, dummyLocation).getOrElse(fail())
|
||||
|
||||
"have valid fields" in {
|
||||
foreignDefinition.location shouldEqual dummyLocation
|
||||
@ -1371,7 +1373,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
val emptyNode = Node.New.Empty()
|
||||
val nilNode = Node.New.MetaNil()
|
||||
val consNode =
|
||||
Node.New.MetaList(emptyNode, nilNode).getOrElse(fail)
|
||||
Node.New.MetaList(emptyNode, nilNode).getOrElse(fail())
|
||||
|
||||
"be correctly identified" in {
|
||||
Utility.ListOps.is(emptyNode) shouldEqual false
|
||||
@ -1396,7 +1398,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
|
||||
listOfOne.tail.target match {
|
||||
case NodeShape.MetaNil.any(_) =>
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
|
||||
listOfMany.head.target shouldEqual emptyNode1
|
||||
@ -1410,11 +1412,11 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
e3.head.target shouldEqual emptyNode3
|
||||
e3.tail.target match {
|
||||
case NodeShape.MetaNil.any(_) => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1465,7 +1467,7 @@ class SmartConstructorsTest extends CompilerTest {
|
||||
|
||||
link.target match {
|
||||
case NodeShape.Empty.any(_) => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -9,7 +9,6 @@ import org.enso.compiler.pass.PassConfiguration._
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis.Graph.{Link, Occurrence}
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis.{Graph, Info}
|
||||
import org.enso.compiler.pass.desugar._
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
|
||||
|
@ -5,7 +5,6 @@ import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.PassConfiguration._
|
||||
import org.enso.compiler.pass.analyse.{AliasAnalysis, DemandAnalysis}
|
||||
import org.enso.compiler.pass.desugar.{GenerateMethodBodies, OperatorToFunction}
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
import org.enso.interpreter.runtime.scope.LocalScope
|
||||
@ -63,7 +62,10 @@ class DemandAnalysisTest extends CompilerTest {
|
||||
* @return a new inline context
|
||||
*/
|
||||
def mkContext: InlineContext = {
|
||||
InlineContext(localScope = Some(LocalScope.root), freshNameSupply = Some(new FreshNameSupply))
|
||||
InlineContext(
|
||||
localScope = Some(LocalScope.root),
|
||||
freshNameSupply = Some(new FreshNameSupply)
|
||||
)
|
||||
}
|
||||
|
||||
// === The Tests ============================================================
|
||||
|
@ -5,7 +5,6 @@ import org.enso.compiler.context.ModuleContext
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.core.IR.Module.Scope.Definition
|
||||
import org.enso.compiler.pass.desugar.ComplexType
|
||||
import org.enso.compiler.pass.resolve.DocumentationComments
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
|
||||
|
@ -3,7 +3,7 @@ package org.enso.compiler.test.pass.desugar
|
||||
import org.enso.compiler.Passes
|
||||
import org.enso.compiler.context.{InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.desugar.{ComplexType, FunctionBinding}
|
||||
import org.enso.compiler.pass.desugar.FunctionBinding
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
|
||||
@ -130,7 +130,7 @@ class FunctionBindingTest extends CompilerTest {
|
||||
|
||||
"desugar module-level methods" in {
|
||||
val ir =
|
||||
"""
|
||||
"""
|
||||
|foo x =
|
||||
| y -> x + y
|
||||
|""".stripMargin.preprocessModule.desugar
|
||||
|
@ -1,11 +1,11 @@
|
||||
package org.enso.compiler.test.pass.desugar
|
||||
|
||||
import org.enso.compiler.Passes
|
||||
import org.enso.compiler.context.{InlineContext, ModuleContext}
|
||||
import org.enso.compiler.context.ModuleContext
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.core.IR.Module.Scope.Definition.Method
|
||||
import org.enso.compiler.pass.desugar.GenerateMethodBodies
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.pass.desugar.{FunctionBinding, GenerateMethodBodies}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
|
||||
class GenerateMethodBodiesTest extends CompilerTest {
|
||||
|
@ -1,10 +1,10 @@
|
||||
package org.enso.compiler.test.pass.desugar
|
||||
|
||||
import org.enso.compiler.Passes
|
||||
import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext}
|
||||
import org.enso.compiler.context.{FreshNameSupply, InlineContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.desugar.SectionsToBinOp
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.pass.desugar.{GenerateMethodBodies, SectionsToBinOp}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
|
||||
class SectionsToBinOpTest extends CompilerTest {
|
||||
@ -106,7 +106,8 @@ class SectionsToBinOpTest extends CompilerTest {
|
||||
.asInstanceOf[IR.CallArgument.Specified]
|
||||
.value
|
||||
.asInstanceOf[IR.Name.Literal]
|
||||
val lamBodySecondArg = lamBody.arguments(1)
|
||||
val lamBodySecondArg = lamBody
|
||||
.arguments(1)
|
||||
.asInstanceOf[IR.CallArgument.Specified]
|
||||
.value
|
||||
.asInstanceOf[IR.Name.Literal]
|
||||
|
@ -5,7 +5,6 @@ import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.PassConfiguration._
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis
|
||||
import org.enso.compiler.pass.desugar.OperatorToFunction
|
||||
import org.enso.compiler.pass.optimise.ApplicationSaturation
|
||||
import org.enso.compiler.pass.optimise.ApplicationSaturation.{
|
||||
CallSaturation,
|
||||
@ -68,7 +67,11 @@ class ApplicationSaturationTest extends CompilerTest {
|
||||
|
||||
val localScope: Option[LocalScope] = Some(LocalScope.root)
|
||||
|
||||
val knownCtx = new InlineContext(localScope = localScope, freshNameSupply = Some(new FreshNameSupply), passConfiguration = Some(knownPassConfig))
|
||||
val knownCtx = new InlineContext(
|
||||
localScope = localScope,
|
||||
freshNameSupply = Some(new FreshNameSupply),
|
||||
passConfiguration = Some(knownPassConfig)
|
||||
)
|
||||
|
||||
val moduleCtx = new ModuleContext(
|
||||
passConfiguration = Some(knownPassConfig),
|
||||
@ -124,7 +127,7 @@ class ApplicationSaturationTest extends CompilerTest {
|
||||
|
||||
resultIR.getMetadata(ApplicationSaturation).foreach {
|
||||
case _: CallSaturation.Exact => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
}
|
||||
|
||||
@ -238,13 +241,13 @@ class ApplicationSaturationTest extends CompilerTest {
|
||||
// The outer should be reported as fully saturated
|
||||
result.getMetadata(ApplicationSaturation).foreach {
|
||||
case _: CallSaturation.Exact => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
|
||||
// The inner should be reported as fully saturated
|
||||
result.getInnerMetadata.foreach {
|
||||
case _: CallSaturation.Exact => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
}
|
||||
|
||||
@ -259,7 +262,7 @@ class ApplicationSaturationTest extends CompilerTest {
|
||||
// The outer should be reported as fully saturated
|
||||
result.getMetadata(ApplicationSaturation).foreach {
|
||||
case _: CallSaturation.Exact => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
|
||||
// The inner should be reported as under saturateD
|
||||
@ -278,7 +281,7 @@ class ApplicationSaturationTest extends CompilerTest {
|
||||
// The outer should be reported as fully saturated
|
||||
result.getMetadata(ApplicationSaturation).foreach {
|
||||
case _: CallSaturation.Exact => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
|
||||
// The inner should be reported as under saturateD
|
||||
@ -312,7 +315,7 @@ class ApplicationSaturationTest extends CompilerTest {
|
||||
.getMetadata(ApplicationSaturation)
|
||||
.foreach {
|
||||
case _: CallSaturation.Unknown => succeed
|
||||
case _ => fail
|
||||
case _ => fail()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,6 @@ import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.PassConfiguration._
|
||||
import org.enso.compiler.pass.analyse.AliasAnalysis
|
||||
import org.enso.compiler.pass.desugar.{GenerateMethodBodies, OperatorToFunction}
|
||||
import org.enso.compiler.pass.optimise.LambdaConsolidate
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
@ -67,7 +66,11 @@ class LambdaConsolidateTest extends CompilerTest {
|
||||
* @return a default inline context
|
||||
*/
|
||||
def mkContext: InlineContext = {
|
||||
InlineContext(localScope = Some(LocalScope.root), freshNameSupply = Some(new FreshNameSupply), passConfiguration = Some(passConfiguration))
|
||||
InlineContext(
|
||||
localScope = Some(LocalScope.root),
|
||||
freshNameSupply = Some(new FreshNameSupply),
|
||||
passConfiguration = Some(passConfiguration)
|
||||
)
|
||||
}
|
||||
|
||||
// === The Tests ============================================================
|
||||
|
@ -3,36 +3,7 @@ package org.enso.compiler.test.pass.resolve
|
||||
import org.enso.compiler.Passes
|
||||
import org.enso.compiler.context.{FreshNameSupply, InlineContext, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.analyse.{
|
||||
AliasAnalysis,
|
||||
CachePreferenceAnalysis,
|
||||
DataflowAnalysis,
|
||||
DemandAnalysis,
|
||||
TailCall
|
||||
}
|
||||
import org.enso.compiler.pass.desugar.{
|
||||
ComplexType,
|
||||
FunctionBinding,
|
||||
GenerateMethodBodies,
|
||||
LambdaShorthandToLambda,
|
||||
NestedPatternMatch,
|
||||
OperatorToFunction,
|
||||
SectionsToBinOp
|
||||
}
|
||||
import org.enso.compiler.pass.lint.{ShadowedPatternFields, UnusedBindings}
|
||||
import org.enso.compiler.pass.optimise.{
|
||||
ApplicationSaturation,
|
||||
LambdaConsolidate,
|
||||
UnreachableMatchBranches
|
||||
}
|
||||
import org.enso.compiler.pass.resolve.{
|
||||
DocumentationComments,
|
||||
IgnoredBindings,
|
||||
OverloadsResolution,
|
||||
SuspendedArguments,
|
||||
TypeFunctions,
|
||||
TypeSignatures
|
||||
}
|
||||
import org.enso.compiler.pass.resolve.DocumentationComments
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
import org.scalatest.Inside
|
||||
|
@ -3,7 +3,6 @@ package org.enso.compiler.test.pass.resolve
|
||||
import org.enso.compiler.Passes
|
||||
import org.enso.compiler.context.{FreshNameSupply, ModuleContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.desugar.{FunctionBinding, GenerateMethodBodies}
|
||||
import org.enso.compiler.pass.resolve.OverloadsResolution
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
|
@ -3,7 +3,7 @@ package org.enso.compiler.test.pass.resolve
|
||||
import org.enso.compiler.Passes
|
||||
import org.enso.compiler.context.{FreshNameSupply, InlineContext}
|
||||
import org.enso.compiler.core.IR
|
||||
import org.enso.compiler.pass.resolve.{IgnoredBindings, TypeFunctions}
|
||||
import org.enso.compiler.pass.resolve.TypeFunctions
|
||||
import org.enso.compiler.pass.{IRPass, PassConfiguration, PassManager}
|
||||
import org.enso.compiler.test.CompilerTest
|
||||
|
||||
|
@ -38,7 +38,8 @@ case class LocationsInstrumenter(instrument: CodeLocationsTestInstrument) {
|
||||
val listener = binding.getElement
|
||||
if (!listener.isSuccessful) {
|
||||
Assertions.fail(
|
||||
s"Node of type ${listener.getType.getSimpleName} at position ${listener.getStart} with length ${listener.getLength} was not found."
|
||||
s"Node of type ${listener.getType.getSimpleName} at position " +
|
||||
s"${listener.getStart} with length ${listener.getLength} was not found."
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -1,13 +1,7 @@
|
||||
package org.enso.interpreter.test.instrument
|
||||
|
||||
import org.enso.interpreter.test.{InterpreterContext, InterpreterTest}
|
||||
import org.enso.polyglot.debugger.{
|
||||
DebugServerInfo,
|
||||
DebuggerSessionManagerEndpoint,
|
||||
ObjectRepresentation,
|
||||
ReplExecutor,
|
||||
SessionManager
|
||||
}
|
||||
import org.enso.polyglot.debugger.{DebugServerInfo, ObjectRepresentation}
|
||||
import org.graalvm.polyglot.Context
|
||||
import org.scalatest.{BeforeAndAfter, EitherValues}
|
||||
|
||||
@ -18,8 +12,8 @@ class ReplTest extends InterpreterTest with BeforeAndAfter with EitherValues {
|
||||
override def contextModifiers: Context#Builder => Context#Builder =
|
||||
_.option(DebugServerInfo.ENABLE_OPTION, "true")
|
||||
|
||||
override def specify(
|
||||
implicit interpreterContext: InterpreterContext
|
||||
override def specify(implicit
|
||||
interpreterContext: InterpreterContext
|
||||
): Unit = {
|
||||
|
||||
"initialize properly" in {
|
||||
|
@ -10,12 +10,7 @@ import org.enso.interpreter.test.Metadata
|
||||
import org.enso.pkg.{Package, PackageManager}
|
||||
import org.enso.polyglot.runtime.Runtime.Api.PushContextResponse
|
||||
import org.enso.polyglot.runtime.Runtime.{Api, ApiRequest}
|
||||
import org.enso.polyglot.{
|
||||
LanguageInfo,
|
||||
PolyglotContext,
|
||||
RuntimeOptions,
|
||||
RuntimeServerInfo
|
||||
}
|
||||
import org.enso.polyglot.{LanguageInfo, PolyglotContext, RuntimeOptions, RuntimeServerInfo}
|
||||
import org.enso.text.editing.model
|
||||
import org.enso.text.editing.model.TextEdit
|
||||
import org.graalvm.polyglot.Context
|
||||
@ -24,6 +19,9 @@ import org.scalatest.BeforeAndAfterEach
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
|
||||
import scala.annotation.nowarn
|
||||
|
||||
@nowarn("msg=multiarg infix syntax")
|
||||
class RuntimeServerTest
|
||||
extends AnyFlatSpec
|
||||
with Matchers
|
||||
|
@ -15,6 +15,7 @@ import akka.stream.scaladsl.Flow
|
||||
import akka.stream.scaladsl.Sink
|
||||
import akka.stream.scaladsl.Source
|
||||
|
||||
import scala.annotation.nowarn
|
||||
import scala.concurrent.ExecutionContext
|
||||
import scala.util.Failure
|
||||
import scala.util.Success
|
||||
@ -65,6 +66,7 @@ trait Server {
|
||||
*
|
||||
* The request's URI is not checked.
|
||||
*/
|
||||
@nowarn("cat=deprecation")
|
||||
val handleRequest: HttpRequest => HttpResponse = {
|
||||
case req @ HttpRequest(GET, _, _, _, _) =>
|
||||
req.header[UpgradeToWebSocket] match {
|
||||
|
@ -12,7 +12,7 @@ import scala.concurrent.{Future, Promise}
|
||||
class ZioEnvExec(runtime: Runtime[ZEnv]) extends Exec[ZIO[ZEnv, *, *]] {
|
||||
|
||||
override def exec[E, A](op: ZIO[ZEnv, E, A]): Future[Either[E, A]] = {
|
||||
val promise = Promise[Either[E, A]]
|
||||
val promise = Promise[Either[E, A]]()
|
||||
runtime.unsafeRunAsync(op) {
|
||||
_.fold(
|
||||
{ cause =>

@ -11,7 +11,7 @@ class StepParent(childProps: Props, probe: ActorRef) extends Actor {
  override def receive: Receive = {
    case Terminated(`child`) => probe ! ChildTerminated
    case GracefulStop => child ! GracefulStop
    case msg => probe.tell(msg, sender)
    case msg => probe.tell(msg, sender())
  }
}

@ -2,6 +2,8 @@ package org.enso.searcher.sql

import slick.jdbc.SQLiteProfile.api._

import scala.annotation.nowarn

/** A row in the arguments table.
  *
  * @param id the id of an argument

@ -54,6 +56,7 @@ object SuggestionKind {
}

/** The schema of the arguments table. */
@nowarn("msg=multiarg infix syntax")
final class ArgumentsTable(tag: Tag)
    extends Table[ArgumentRow](tag, "arguments") {

@ -77,6 +80,7 @@ final class ArgumentsTable(tag: Tag)
}

/** The schema of the suggestions table. */
@nowarn("msg=multiarg infix syntax")
final class SuggestionsTable(tag: Tag)
    extends Table[SuggestionRow](tag, "suggestions") {

@ -441,7 +441,7 @@ object Shape extends ShapeImplicit {
    implicit def repr[T]: Repr[InvalidSuffix[T]] =
      t => R + t.elem.repr + t.suffix
    implicit def span[T]: HasSpan[InvalidSuffix[T]] =
      t => t.elem.span + t.suffix.length
      t => t.elem.span() + t.suffix.length
  }
  object Literal extends IntermediateTrait[Literal] {
    implicit def ftor: Functor[Literal] = semi.functor

@ -708,10 +708,10 @@ object Shape extends ShapeImplicit {
      t =>
        t.copy(
          func = (Index.Start, t.func),
          arg = (Index(t.func.span + t.off), t.arg)
          arg = (Index(t.func.span() + t.off), t.arg)
        )
    implicit def span[T: HasSpan]: HasSpan[Prefix[T]] =
      t => t.func.span + t.off + t.arg.span
      t => t.func.span() + t.off + t.arg.span()

  }
  object Infix {

@ -722,12 +722,12 @@ object Shape extends ShapeImplicit {
    implicit def ozip[T: HasSpan]: OffsetZip[Infix, T] =
      t => {
        val larg = Index.Start -> t.larg
        val opr = Index(t.larg.span + t.loff) -> t.opr
        val rarg = Index(t.larg.span + t.loff + t.opr.span + t.roff) -> t.rarg
        val opr = Index(t.larg.span() + t.loff) -> t.opr
        val rarg = Index(t.larg.span() + t.loff + t.opr.span() + t.roff) -> t.rarg
        t.copy(larg = larg, opr = opr, rarg = rarg)
      }
    implicit def span[T: HasSpan]: HasSpan[Infix[T]] =
      t => t.larg.span + t.loff + t.opr.span + t.roff + t.rarg.span
      t => t.larg.span() + t.loff + t.opr.span() + t.roff + t.rarg.span()
  }

  object Section extends IntermediateTrait[Section] {
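
Note: as a reading aid for the span arithmetic in the Prefix and Infix instances above, each node's span is the sum of its children's spans plus the whitespace offsets separating them. A simplified, self-contained sketch using hypothetical types (not the repository's actual Shape or HasSpan definitions):

// Sketch only: simplified stand-in for the Infix span computation above.
// Width = left argument + left offset + operator + right offset + right argument.
final case class InfixNode(larg: String, loff: Int, opr: String, roff: Int, rarg: String) {
  def span(): Int = larg.length + loff + opr.length + roff + rarg.length
}

object InfixSpanExample {
  def main(args: Array[String]): Unit = {
    val node = InfixNode("a", 1, "+", 1, "b")
    println(node.span()) // 5, the width of "a + b"
  }
}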

@ -744,7 +744,7 @@ object Shape extends ShapeImplicit {
    implicit def ozip[T]: OffsetZip[SectionLeft, T] =
      t => t.copy(arg = (Index.Start, t.arg))
    implicit def span[T: HasSpan]: HasSpan[SectionLeft[T]] =
      t => t.arg.span + t.off + t.opr.span
      t => t.arg.span() + t.off + t.opr.span
  }
  object SectionRight {
    implicit def ftor: Functor[SectionRight] = semi.functor

@ -754,7 +754,7 @@ object Shape extends ShapeImplicit {
    implicit def ozip[T]: OffsetZip[SectionRight, T] =
      t => t.copy(arg = (Index(t.opr.span + t.off), t.arg))
    implicit def span[T: HasSpan]: HasSpan[SectionRight[T]] =
      t => t.opr.span + t.off + t.arg.span
      t => t.opr.span + t.off + t.arg.span()
  }
  object SectionSides {
    implicit def ftor: Functor[SectionSides] = semi.functor

@ -785,13 +785,13 @@ object Shape extends ShapeImplicit {
        index += t.emptyLines.map(_ + 1).sum
        index += t.indent
        val line = t.firstLine.copy(elem = (Index(index), t.firstLine.elem))
        index += t.firstLine.span + newline.span
        index += t.firstLine.span() + newline.span
        val lines = for (line <- t.lines) yield {
          val elem = line.elem.map(elem => {
            index += t.indent
            (Index(index), elem)
          })
          index += line.span + newline.span
          index += line.span() + newline.span
          line.copy(elem = elem)
        }
        t.copy(firstLine = line, lines = lines)

@ -825,7 +825,7 @@ object Shape extends ShapeImplicit {
    implicit def fold: Foldable[Line] = semi.foldable
    implicit def repr[T: Repr]: Repr[Line[T]] = t => R + t.elem + t.off
    implicit def span[T: HasSpan]: HasSpan[Line[T]] =
      t => t.elem.span + t.off
      t => t.elem.span() + t.off
    implicit def spanOpt[T: HasSpan]: HasSpan[OptLine[T]] =
      t => t.elem.map(_.span()).getOrElse(0) + t.off
  }

@ -839,7 +839,7 @@ object Shape extends ShapeImplicit {
        var index = 0
        val lines = t.lines.map { line =>
          val elem = line.elem.map((Index(index), _))
          index += line.span + newline.span
          index += line.span() + newline.span
          line.copy(elem = elem)
        }
        t.copy(lines = lines)

@ -909,7 +909,7 @@ object Shape extends ShapeImplicit {
        })
      }
      implicit def span[T: HasSpan]: HasSpan[Segment[T]] =
        t => t.head.span + t.body.span
        t => t.head.span + t.body.span()

      def apply[T](head: AST.Ident): Shape.Match.Segment[T] =
        Shape.Match.Segment(head, Pattern.Match.Nothing())

@ -952,7 +952,7 @@ object Shape extends ShapeImplicit {
      t => {
        val symbolRepr = R + symbol + symbol
        val betweenDocAstRepr =
          R + newline + newline.build * t.emptyLinesBetween
          R + newline + newline.build() * t.emptyLinesBetween
        R + symbolRepr + t.doc + betweenDocAstRepr + t.ast
      }
    implicit def offsetZip[T]: OffsetZip[Documented, T] =

@ -271,48 +271,58 @@ object Pattern {
    implicit def travMatch: Traverse[MatchOf] = _MatchOf.travMatch
    implicit def foldMatch: Foldable[MatchOf] = _MatchOf.foldMatch

    implicit def offZipMatch[T: HasSpan]: OffsetZip[MatchOf, T] = t => {
      val s = t.map(Shifted(0, _))
      val s2 = mapWithOff(s) { case (i, el) => Shifted(i, el.wrapped) }
      val s3 = s2.map(t => (Index(t.off), t.wrapped))
      s3
    }
    implicit def offZipMatch[T: HasSpan]: OffsetZip[MatchOf, T] =
      t => {
        val s = t.map(Shifted(0, _))
        val s2 = mapWithOff(s) { case (i, el) => Shifted(i, el.wrapped) }
        val s3 = s2.map(t => (Index(t.off), t.wrapped))
        s3
      }

    import HasSpan.implicits._
    implicit def span[T: HasSpan]: HasSpan[MatchOf[T]] = t => {
      t.toStream.span()
    }
    implicit def span[T: HasSpan]: HasSpan[MatchOf[T]] =
      t => {
        t.toStream.span()
      }

    val M = Match
    // format: off
    def mapWithOff[T:HasSpan](self:MatchOf[T])(f: (Int,T) => T): MatchOf[T] =
      mapWithOff_(self)(f,0)._1
    def mapWithOff[T: HasSpan](self: MatchOf[T])(f: (Int, T) => T): MatchOf[T] =
      mapWithOff_(self)(f, 0)._1

    def mapWithOff_[T:HasSpan](self:MatchOf[T])(f: (Int,T) => T, off:Int): (MatchOf[T], Int) = self match {
      // TODO: [MWU] code below could likely be cleaned up with macro usage
      case m: M.Build[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Err[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Tok[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Blank[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Var[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Cons[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Opr[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Mod[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Num[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Text[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Block[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Macro[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: M.Invalid[T] => (m.copy(elem = f(off,m.elem)), off + m.elem.span)
      case m: Pattern.MatchOf[T] =>
        var loff = off
        val out = m.mapStructShallow {p =>
          val (nmatch, noff) = mapWithOff_(p)(f, loff)
          loff = noff
          nmatch
        }
        (out, loff)
    }
    // format: on
    def mapWithOff_[T: HasSpan](
      self: MatchOf[T]
    )(f: (Int, T) => T, off: Int): (MatchOf[T], Int) =
      self match {
        // TODO: [MWU] code below could likely be cleaned up with macro usage
        case m: M.Build[T] =>
          (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Err[T] => (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Tok[T] => (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Blank[T] =>
          (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Var[T] => (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Cons[T] =>
          (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Opr[T] => (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Mod[T] => (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Num[T] => (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Text[T] =>
          (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Block[T] =>
          (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Macro[T] =>
          (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: M.Invalid[T] =>
          (m.copy(elem = f(off, m.elem)), off + m.elem.span())
        case m: Pattern.MatchOf[T] =>
          var loff = off
          val out = m.mapStructShallow { p =>
            val (nmatch, noff) = mapWithOff_(p)(f, loff)
            loff = noff
            nmatch
          }
          (out, loff)
      }
  }
  object _MatchOf {
    def ftorMatch: Functor[MatchOf] = semi.functor

@ -62,7 +62,7 @@ case class ParserDef() extends flexer.Parser[AST.Module] {
    def pop(): Unit = logger.trace {
      current = stack.head
      stack = stack.tail
      logger.log(s"New result: ${current.map(_.show).getOrElse("None")}")
      logger.log(s"New result: ${current.map(_.show()).getOrElse("None")}")
    }

    def app(fn: String => AST): Unit =

@ -14,7 +14,7 @@ class CommentRemover(input: String) {
    val result = new StringBuilder(input.length)

    while (it.hasNext) {
      val char: Char = it.next
      val char: Char = it.next()
      if (char != '\n' && currentState == CommentRemover.InComment) {
        result.addOne(' ')
      } else if (currentState == CommentRemover.Base && char == '#') {

@ -1,6 +1,6 @@
[package]
name = "flexer"
version = "0.0.1"
version = "0.1.0"
authors = [
  "Enso Team <enso-dev@enso.org>",
  "Ara Adkins <ara.adkins@enso.org>"

@ -14,14 +14,19 @@ repository = "https://github.com/enso-org/enso"
license-file = "../../LICENSE"

keywords = ["lexer", "finite-automata"]
categories = ["parsing"]

publish = false

[lib]
name = "flexer"
crate-type = ["dylib", "rlib"]
crate-type = ["cdylib", "rlib"]
test = true
bench = true

[dependencies]
itertools = "0.8"
wasm-bindgen = "0.2"

[dev-dependencies]
wasm-bindgen-test = "0.2"

@ -1,2 +1,2 @@
sbt.version=1.3.10
sbt.version=1.3.13

@ -1,4 +1,4 @@
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10")
addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "1.4.0-RC1")
addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.0.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.0.1")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.1.0")