Port //language-support/scala to Scala 2.13 (#8442)

The one thing that is still missing is making the generated Scala code
from the codegen compatible with Scala 2.13 so the examples are
excluded for now.

changelog_begin
changelog_end
This commit is contained in:
Moritz Kiefer 2021-01-08 20:28:15 +01:00 committed by GitHub
parent 3f54258a22
commit 3a8f5e8aaa
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
44 changed files with 235 additions and 169 deletions

View File

@ -199,11 +199,14 @@ def _wrap_rule(
deps = [],
scala_deps = [],
versioned_scala_deps = {},
runtime_deps = [],
scala_runtime_deps = [],
exports = [],
scala_exports = [],
silent_annotations = False,
**kwargs):
deps = deps + ["{}_{}".format(d, scala_major_version_suffix) for d in scala_deps + versioned_scala_deps.get(scala_major_version, [])]
runtime_deps = runtime_deps + ["{}_{}".format(d, scala_major_version_suffix) for d in scala_runtime_deps]
exports = exports + ["{}_{}".format(d, scala_major_version_suffix) for d in scala_exports]
if silent_annotations:
scalacopts = ["-P:silencer:checkUnused"] + scalacopts
@ -216,6 +219,7 @@ def _wrap_rule(
scalacopts = common_scalacopts + plugin_scalacopts + scalacopts,
plugins = common_plugins + plugins,
deps = deps,
runtime_deps = runtime_deps,
**kwargs
)

View File

@ -166,12 +166,17 @@ jobs:
eval "$(./dev-env/bin/dade-assist)"
bazel build --config scala_2_13 -- \
//libs-scala/... \
//daml-lf/...
//daml-lf/... \
//language-support/scala/... \
-//language-support/scala/examples/... \
-//language-support/scala/codegen-sample-app/...
# gatling-utils tests fail with a ClassNotFoundException for scala.collection.SeqLike
bazel test --config scala_2_13 -- \
//libs-scala/... \
//daml-lf/... \
-//libs-scala/gatling-utils/...
-//libs-scala/gatling-utils/... \
-//language-support/scala/examples/... \
-//language-support/scala/codegen-sample-app/...
displayName: 'Build'
- template: tell-slack-failed.yml
parameters:

View File

@ -12,16 +12,16 @@ da_scala_library(
srcs = glob([
"src/main/scala/com/digitalasset/assistant/**/*.scala",
]),
scala_deps = [
"@maven//:io_circe_circe_core",
"@maven//:io_circe_circe_yaml",
"@maven//:org_typelevel_cats_core",
],
tags = ["maven_coordinates=com.daml:daml-sdk-project-config:__VERSION__"],
visibility = ["//visibility:public"],
runtime_deps = [
"@maven//:ch_qos_logback_logback_classic",
],
deps = [
"@maven//:io_circe_circe_core_2_12",
"@maven//:io_circe_circe_yaml_2_12",
"@maven//:org_typelevel_cats_core_2_12",
],
)
da_scala_test_suite(
@ -31,8 +31,10 @@ da_scala_test_suite(
"src/test/**/*Spec.scala",
"src/test/**/*Test.scala",
]),
scala_deps = [
"@maven//:org_scalatest_scalatest",
],
deps = [
":scala-daml-project-config",
"@maven//:org_scalatest_scalatest_2_12",
],
)

View File

@ -114,7 +114,7 @@ object ProjectConfig {
/** Returns the path of the current daml project config file, if any.
* The path is given by environment variables set by the SDK Assistant. */
def projectConfigPath(): Either[ConfigLoadingError, File] =
projectPath.flatMap(path =>
projectPath().flatMap(path =>
Try(new File(path, projectConfigName)).toEither.left.map(t => ConfigMissing(t.getMessage)))
/** Loads a project configuration from a string */
@ -147,7 +147,7 @@ object ProjectConfig {
* This is the preferred way of loading the SDK project configuration. */
def loadFromEnv(): Either[ConfigLoadingError, ProjectConfig] = {
for {
path <- projectConfigPath
path <- projectConfigPath()
result <- loadFromFile(path)
} yield result
}

View File

@ -10,15 +10,17 @@ load(
da_scala_library(
name = "codegen-common",
srcs = glob(["src/main/**/*.scala"]),
scala_deps = [
"@maven//:com_github_scopt_scopt",
"@maven//:io_circe_circe_core",
"@maven//:org_typelevel_cats_core",
],
tags = ["maven_coordinates=com.daml:codegen-jvm-common:__VERSION__"],
visibility = ["//visibility:public"],
deps = [
"//daml-assistant/scala-daml-project-config",
"//libs-scala/build-info",
"@maven//:ch_qos_logback_logback_classic",
"@maven//:com_github_scopt_scopt_2_12",
"@maven//:io_circe_circe_core_2_12",
"@maven//:org_typelevel_cats_core_2_12",
],
)

View File

@ -10,21 +10,25 @@ da_scala_library(
name = "bindings-akka-testing",
srcs = glob(["src/main/scala/**/*.scala"]),
resources = glob(["src/main/resources/**/*"]),
scala_deps = [
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:com_typesafe_scala_logging_scala_logging",
"@maven//:org_scalactic_scalactic",
"@maven//:org_scalatest_scalatest",
],
scala_runtime_deps = [
"@maven//:com_typesafe_akka_akka_slf4j",
],
visibility = [
"//visibility:public",
],
exports = [],
runtime_deps = [
"@maven//:ch_qos_logback_logback_classic",
"@maven//:com_typesafe_akka_akka_slf4j_2_12",
],
deps = [
"//ledger-api/rs-grpc-bridge",
"@maven//:com_typesafe_akka_akka_actor_2_12",
"@maven//:com_typesafe_akka_akka_stream_2_12",
"@maven//:com_typesafe_config",
"@maven//:com_typesafe_scala_logging_scala_logging_2_12",
"@maven//:org_scalactic_scalactic_2_12",
"@maven//:org_scalatest_scalatest_2_12",
],
)

View File

@ -81,13 +81,20 @@ da_scala_test_suite(
"src/test/**/*.scala",
],
),
scala_deps = [
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:com_typesafe_scala_logging_scala_logging",
"@maven//:org_scalatest_scalatest",
"@maven//:org_scalaz_scalaz_core",
],
scala_runtime_deps = [
"@maven//:com_typesafe_akka_akka_stream_testkit",
],
silent_annotations = True,
visibility = [
"//visibility:public",
],
runtime_deps = [
"@maven//:com_typesafe_akka_akka_stream_testkit_2_12",
],
deps = [
":bindings-akka",
"//language-support/scala/bindings",
@ -95,11 +102,6 @@ da_scala_test_suite(
"//ledger-api/rs-grpc-bridge",
"//ledger/ledger-api-client",
"@maven//:com_google_api_grpc_proto_google_common_protos",
"@maven//:com_typesafe_akka_akka_actor_2_12",
"@maven//:com_typesafe_akka_akka_stream_2_12",
"@maven//:com_typesafe_config",
"@maven//:com_typesafe_scala_logging_scala_logging_2_12",
"@maven//:org_scalatest_scalatest_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
],
)

View File

@ -47,15 +47,17 @@ da_scala_test_suite(
size = "small",
srcs = glob(["src/test/**/*.scala"]),
plugins = [
"@maven//:org_typelevel_kind_projector_2_12_12",
"@maven//:org_typelevel_kind_projector_{}".format(scala_version_suffix),
],
scala_deps = [
"@maven//:com_chuusai_shapeless",
"@maven//:org_scala_lang_modules_scala_collection_compat",
"@maven//:org_scalacheck_scalacheck",
"@maven//:org_scalatest_scalatest",
"@maven//:org_scalatestplus_scalacheck_1_14",
"@maven//:org_scalaz_scalaz_core",
],
deps = [
":bindings",
"@maven//:com_chuusai_shapeless_2_12",
"@maven//:org_scala_lang_modules_scala_collection_compat_2_12",
"@maven//:org_scalacheck_scalacheck_2_12",
"@maven//:org_scalatest_scalatest_2_12",
"@maven//:org_scalatestplus_scalacheck_1_14_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
],
)

View File

@ -5,14 +5,27 @@ load(
"//bazel_tools:scala.bzl",
"da_scala_library",
"da_scala_test_suite",
"silencer_plugin",
)
load("@scala_version//:index.bzl", "scala_major_version", "scala_version_suffix")
da_scala_library(
name = "codegen-testing",
srcs = glob(["src/main/**/*.scala"]),
plugins = [
# Plugins have to be specified as JARs.
"@maven//:org_typelevel_kind_projector_2_12_12",
"@maven//:org_typelevel_kind_projector_{}".format(scala_version_suffix),
silencer_plugin,
],
scala_deps = [
"@maven//:org_scala_lang_modules_scala_collection_compat",
"@maven//:org_scalacheck_scalacheck",
"@maven//:org_scalaz_scalaz_core",
"@maven//:org_scalaz_scalaz_scalacheck_binding",
],
scalacopts = [
# Forced upon us by scalatest
"-P:silencer:lineContentFilters=Stream",
],
visibility = [
"//visibility:public",
@ -21,10 +34,6 @@ da_scala_library(
runtime_deps = [],
deps = [
"//language-support/scala/bindings",
"@maven//:org_scala_lang_modules_scala_collection_compat_2_12",
"@maven//:org_scalacheck_scalacheck_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
"@maven//:org_scalaz_scalaz_scalacheck_binding_2_12",
],
)
@ -42,10 +51,17 @@ testing_utils = [
da_scala_library(
name = "codegen-testing-testing",
srcs = testing_utils,
srcs = testing_utils + glob(["src/test/{}/**/*.scala".format(scala_major_version)]),
plugins = [
# Plugins have to be specified as JARs.
"@maven//:org_typelevel_kind_projector_2_12_12",
"@maven//:org_typelevel_kind_projector_{}".format(scala_version_suffix),
],
scala_deps = [
"@maven//:org_scalacheck_scalacheck",
"@maven//:org_scalactic_scalactic",
"@maven//:org_scalatest_scalatest",
"@maven//:org_scalatestplus_scalacheck_1_14",
"@maven//:org_scalaz_scalaz_core",
],
visibility = [
"//visibility:public",
@ -55,11 +71,6 @@ da_scala_library(
deps = [
":codegen-testing",
"//language-support/scala/bindings",
"@maven//:org_scalacheck_scalacheck_2_12",
"@maven//:org_scalactic_scalactic_2_12",
"@maven//:org_scalatest_scalatest_2_12",
"@maven//:org_scalatestplus_scalacheck_1_14_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
],
)
@ -68,20 +79,22 @@ da_scala_test_suite(
size = "small",
srcs = glob(
[
"src/test/**/*.scala",
"src/test/scala/**/*.scala",
],
exclude = testing_utils,
),
scala_deps = [
"@maven//:com_chuusai_shapeless",
"@maven//:org_scalacheck_scalacheck",
"@maven//:org_scalatest_scalatest",
"@maven//:org_scalatestplus_scalacheck_1_14",
"@maven//:org_scalaz_scalaz_core",
],
silent_annotations = True,
deps = [
":codegen-testing",
":codegen-testing-testing",
"//language-support/scala/bindings",
"@maven//:com_chuusai_shapeless_2_12",
"@maven//:org_apache_commons_commons_text",
"@maven//:org_scalacheck_scalacheck_2_12",
"@maven//:org_scalatest_scalatest_2_12",
"@maven//:org_scalatestplus_scalacheck_1_14_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
],
)

View File

@ -0,0 +1,10 @@
// Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.client.binding.test
// Scala 2.12 version of the CollectionCompat shim. Tests refer to
// `test.CollectionCompat.IterableFactory` so the same code compiles on both
// 2.12 and 2.13; this file supplies the 2.12 definition (a parallel 2.13
// source file provides the other).
object CollectionCompat {
// On 2.12 a collection companion (e.g. `List`) is a
// `collection.generic.TraversableFactory`; the upper bound mirrors the
// `GenericTraversableTemplate` self-type that factory requires.
type IterableFactory[
CC[X] <: Traversable[X] with collection.generic.GenericTraversableTemplate[X, CC]] =
collection.generic.TraversableFactory[CC]
}

View File

@ -0,0 +1,8 @@
// Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.client.binding.test
// Scala 2.13 version of the CollectionCompat shim. Tests refer to
// `test.CollectionCompat.IterableFactory` so the same code compiles on both
// 2.12 and 2.13; here the alias maps directly onto the 2.13 standard-library
// `scala.collection.IterableFactory` (a parallel 2.12 source file provides
// the pre-2.13 equivalent).
object CollectionCompat {
type IterableFactory[+CC[_]] = scala.collection.IterableFactory[CC]
}

View File

@ -19,7 +19,7 @@ class PrimitiveSpec extends AnyWordSpec with Matchers with ScalaCheckDrivenPrope
"Primitive types" when {
"defined concretely" should {
"have nice companion aliases" in {
P.List: collection.generic.TraversableFactory[P.List]
P.List: test.CollectionCompat.IterableFactory[P.List]
}
}
"defined abstractly" should {

View File

@ -6,8 +6,6 @@ package com.daml.ledger.client.binding
import encoding.{ValuePrimitiveEncoding, GenEncoding}
import org.scalacheck.{Arbitrary, Gen, Shrink}
import scala.language.higherKinds
private[binding] object ValueGen {
private[binding] sealed abstract class Exists[F[_]] {

View File

@ -5,7 +5,6 @@ package com.daml.ledger.client
package binding
package encoding
import scala.language.higherKinds
import scala.collection.immutable.Map
import org.scalacheck.{Arbitrary, Gen, Shrink}
@ -451,7 +450,7 @@ object LfTypeEncodingSpec {
object RecordFields extends InvariantApply[RecordFields] {
override def xmap[A, Z](fa: RecordFields[A], f: A => Z, g: Z => A): RecordFields[Z] =
fa copy (writers = g andThen fa.writers, reader = fa.reader andThen (_ leftMap (_ map f)))
fa.copy(writers = g andThen fa.writers, reader = fa.reader andThen (_ leftMap (_ map f)))
override def xmapN[A, B, Z](fa: RecordFields[A], fb: RecordFields[B])(f: (A, B) => Z)(
g: Z => (A, B)): RecordFields[Z] =

View File

@ -6,7 +6,9 @@ load(
"da_scala_binary",
"da_scala_library",
"da_scala_test_suite",
"silencer_plugin",
)
load("@scala_version//:index.bzl", "scala_version_suffix")
common_scalacopts = [
"-Xelide-below",
@ -26,9 +28,17 @@ da_scala_library(
),
plugins = [
# Plugins have to be specified as JARs.
"@maven//:org_typelevel_kind_projector_2_12_12",
"@maven//:org_typelevel_kind_projector_{}".format(scala_version_suffix),
silencer_plugin,
],
scala_deps = [
"@maven//:com_typesafe_scala_logging_scala_logging",
"@maven//:org_scalaz_scalaz_core",
"@maven//:org_scala_lang_modules_scala_collection_compat",
],
scalacopts = common_scalacopts + [
"-P:silencer:lineContentFilters=import scala.collection.compat",
],
scalacopts = common_scalacopts,
tags = ["maven_coordinates=com.daml:codegen-scala:__VERSION__"],
visibility = [
"//visibility:public",
@ -43,8 +53,6 @@ da_scala_library(
"//daml-lf/data",
"//daml-lf/interface",
"@maven//:com_google_protobuf_protobuf_java",
"@maven//:com_typesafe_scala_logging_scala_logging_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
"@maven//:org_slf4j_slf4j_api",
],
)
@ -55,6 +63,10 @@ da_scala_binary(
"src/main/scala/com/digitalasset/codegen/Main.scala",
],
main_class = "com.daml.codegen.Main",
scala_deps = [
"@maven//:com_typesafe_scala_logging_scala_logging",
"@maven//:org_scalaz_scalaz_core",
],
tags = ["maven_coordinates=com.daml:codegen-main:__VERSION__"],
visibility = [
"//visibility:public",
@ -64,8 +76,6 @@ da_scala_binary(
"//language-support/codegen-common",
"@maven//:ch_qos_logback_logback_classic",
"@maven//:ch_qos_logback_logback_core",
"@maven//:com_typesafe_scala_logging_scala_logging_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
"@maven//:org_slf4j_slf4j_api",
],
)
@ -74,15 +84,22 @@ da_scala_test_suite(
name = "tests",
size = "small",
srcs = glob(["src/test/scala/**/*.scala"]),
scalacopts = common_scalacopts,
plugins = [silencer_plugin],
scala_deps = [
"@maven//:org_scala_lang_modules_scala_collection_compat",
"@maven//:org_scalacheck_scalacheck",
"@maven//:org_scalatest_scalatest",
"@maven//:org_scalatestplus_scalacheck_1_14",
"@maven//:org_scalaz_scalaz_core",
],
scalacopts = common_scalacopts + [
"-P:silencer:lineContentFilters=import scala.collection.compat",
],
deps = [
":codegen",
"//daml-lf/archive:daml_lf_archive_reader",
"//daml-lf/data",
"//daml-lf/interface",
"//daml-lf/transaction-test-lib",
"@maven//:org_scalacheck_scalacheck_2_12",
"@maven//:org_scalatest_scalatest_2_12",
"@maven//:org_scalatestplus_scalacheck_1_14_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
],
)

View File

@ -28,7 +28,7 @@ import scalaz.syntax.std.option._
import scalaz.syntax.bind._
import scalaz.syntax.traverse1._
import scala.collection.breakOut
import scala.collection.compat._
import scala.util.{Failure, Success}
import scala.util.matching.Regex
@ -44,7 +44,7 @@ object CodeGen {
val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
import universe._
import Util.{FilePlan, WriteParams, partitionEithers}
import Util.{FilePlan, WriteParams}
/*
* Given a DAML package (in DAR or DALF format), a package name and an output
@ -180,12 +180,12 @@ object CodeGen {
: List[(Identifier, util.TemplateInterface) Either ScopedDataType.FWT] =
orderedDependencies.deps.toList.flatMap {
case (templateId, Node(TypeDeclWrapper(typeDecl), _, _)) =>
Seq(Right(ScopedDataType fromDefDataType (templateId, typeDecl)))
Seq(Right(ScopedDataType.fromDefDataType(templateId, typeDecl)))
case (templateId, Node(TemplateWrapper(templateInterface), _, _)) =>
Seq(Left((templateId, templateInterface)))
}
partitionEithers(templateIdOrTypeDecls).leftMap(_.toMap)
templateIdOrTypeDecls.partitionMap(identity).leftMap(_.toMap)
}
// Each record/variant has Scala code generated for it individually, unless their names are related
@ -202,7 +202,7 @@ object CodeGen {
private[codegen] def produceTemplateAndTypeFilesLF(
wp: WriteParams[DefTemplateWithRecord.FWT],
util: lf.LFUtil): TraversableOnce[FilePlan] = {
util: lf.LFUtil): IterableOnce[FilePlan] = {
import wp._
// New prep steps for LF codegen
@ -244,7 +244,7 @@ object CodeGen {
List[ScopedDataType[Enum]]
) = {
val (recordAndVariants, enums) = partitionEithers(definitions map {
val (recordAndVariants, enums) = definitions.partitionMap {
case sdt @ ScopedDataType(_, _, ddt) =>
ddt match {
case r: Record[RT] =>
@ -254,9 +254,9 @@ object CodeGen {
case e: Enum =>
Right(sdt copy (dataType = e))
}
})
}
val (records, variants) = partitionEithers(recordAndVariants)
val (records, variants) = recordAndVariants.partitionMap(identity)
(records, variants, enums)
}
@ -279,9 +279,9 @@ object CodeGen {
val (records, variants, enums) = splitNTDs(definitions)
val recordMap: Map[(ScopedDataType.Name, List[Ref.Name]), ScopedDataType[Record[RT]]] =
records.map {
records.view.map {
case ddt @ ScopedDataType(name, vars, _) => (name -> vars.toList) -> ddt
}(breakOut)
}.toMap
val noDeletion = Set.empty[(Identifier, List[Ref.Name])]
val (deletedRecords, newVariants) =
@ -310,7 +310,7 @@ object CodeGen {
private[this] def writeTemplatesAndTypes(util: Util)(
wp: WriteParams[util.TemplateInterface]): Unit = {
util.templateAndTypeFiles(wp) foreach {
util.templateAndTypeFiles(wp).iterator.foreach {
case -\/(msg) => logger.debug(msg)
case \/-((msg, filePath, trees)) =>
msg foreach (m => logger.debug(m))

View File

@ -12,8 +12,6 @@ import com.typesafe.scalalogging.StrictLogging
import org.slf4j.{Logger, LoggerFactory}
import scalaz.Cord
import scala.collection.breakOut
object Main extends StrictLogging {
private val codegenId = "Scala Codegen"
@ -54,8 +52,8 @@ object Main extends StrictLogging {
}
private def darsAndOnePackageName(darMap: Map[Path, Option[String]]): (List[File], String) = {
val dars: List[File] = darMap.keys.map(_.toFile)(breakOut)
val uniquePackageNames: Set[String] = darMap.values.collect { case Some(x) => x }(breakOut)
val dars: List[File] = darMap.keys.view.map(_.toFile).toList
val uniquePackageNames: Set[String] = darMap.values.view.collect { case Some(x) => x }.toSet
uniquePackageNames.toSeq match {
case Seq(packageName) =>
(dars, packageName)

View File

@ -10,9 +10,8 @@ import com.daml.lf.data.ImmArray.ImmArraySeq
import java.io.File
import scala.collection.compat._
import scala.reflect.runtime.universe._
import scala.collection.generic.CanBuildFrom
import scala.collection.TraversableLike
import scalaz.{Tree => _, _}
import scalaz.std.list._
import scalaz.syntax.std.option._
@ -39,7 +38,7 @@ abstract class Util(val packageName: String, val outputDir: File) { self =>
private[codegen] def orderedDependencies(library: Interface)
: OrderedDependencies[Ref.Identifier, TypeDeclOrTemplateWrapper[TemplateInterface]]
def templateAndTypeFiles(wp: WriteParams[TemplateInterface]): TraversableOnce[FilePlan]
def templateAndTypeFiles(wp: WriteParams[TemplateInterface]): IterableOnce[FilePlan]
/**
* Convert the metadataAlias into a [[DamlScalaName]] based on the `codeGenDeclKind`.
@ -99,12 +98,12 @@ abstract class Util(val packageName: String, val outputDir: File) { self =>
override def equals(ojb: Any): Boolean = ojb match {
case that: DamlScalaName =>
that.packageSuffixParts.deep == this.packageSuffixParts.deep && that.name == this.name
that.packageSuffixParts.sameElements(this.packageSuffixParts) && that.name == this.name
case _ =>
false
}
override def hashCode(): Int = (this.packageSuffixParts.deep, this.name).hashCode()
override def hashCode(): Int = (this.packageSuffixParts.toIndexedSeq, this.name).hashCode()
}
/**
@ -211,11 +210,4 @@ object Util {
case _ => None
}
}
/** cf. scalaz.MonadPlus#separate */
private[codegen] def partitionEithers[A, B, Coll, AS, BS](
abs: TraversableLike[Either[A, B], Coll])(
implicit AS: CanBuildFrom[Coll, A, AS],
BS: CanBuildFrom[Coll, B, BS]): (AS, BS) =
(abs collect { case Left(a) => a }, abs collect { case Right(a) => a })
}

View File

@ -14,7 +14,7 @@ import scalaz.syntax.bifoldable._
import scalaz.syntax.foldable._
import scalaz.Bifoldable
import scala.language.higherKinds
import scala.collection.compat._
sealed abstract class DependencyGraph[Iface, TmplI] {
def orderedDependencies(
@ -27,7 +27,7 @@ private final case class LFDependencyGraph(private val util: lf.LFUtil)
: OrderedDependencies[Identifier, TypeDeclOrTemplateWrapper[DefTemplateWithRecord.FWT]] = {
val EnvironmentInterface(decls) = library
// invariant: no type decl name equals any template alias
val typeDeclNodes = decls.to[ImmArraySeq].collect {
val typeDeclNodes = decls.to(ImmArraySeq).collect {
case (qualName, InterfaceType.Normal(typeDecl)) =>
(
qualName,
@ -36,7 +36,7 @@ private final case class LFDependencyGraph(private val util: lf.LFUtil)
symmGenTypeDependencies(typeDecl),
collectDepError = false))
}
val templateNodes = decls.to[ImmArraySeq].collect {
val templateNodes = decls.to(ImmArraySeq).collect {
case (qualName, InterfaceType.Template(typ, tpl)) =>
val recDeps = typ.foldMap(Util.genTypeTopLevelDeclNames)
val choiceAndKeyDeps = tpl.foldMap(Util.genTypeTopLevelDeclNames)

View File

@ -5,6 +5,8 @@ package com.daml.codegen.dependencygraph
import com.daml.codegen.exception.UnsopportedTypeError
import scala.collection.compat._
object Graph {
/**
@ -13,7 +15,7 @@ object Graph {
* member of the cycle first.
*/
def cyclicDependencies[K, A](
internalNodes: TraversableOnce[(K, BaseNode[K, A])],
internalNodes: IterableOnce[(K, BaseNode[K, A])],
roots: Iterable[(K, BaseNode[K, A])]): OrderedDependencies[K, A] = {
type NKA = Node[K, A]
type Seen = Map[K, Boolean]
@ -25,7 +27,7 @@ object Graph {
stack: Set[K],
id: K,
node: NKA): (Seen, Vector[(K, NKA)], Boolean, List[UnsopportedTypeError]) = {
if (seen.isDefinedAt(id) || stack(id)) (seen, Vector(), seen getOrElse (id, false), List())
if (seen.isDefinedAt(id) || stack(id)) (seen, Vector(), seen.getOrElse(id, false), List())
else {
val Node(_, deps, collectError @ _) = node
val (newSeen, newEnts, missing, utes) = visitN(seen, stack + id, deps)
@ -53,12 +55,12 @@ object Graph {
case n: NKA =>
val (newSeen, newEnts, nMissing, newUtes) = visit(seen, stack, k, n)
(
newSeen updated (k, nMissing),
newSeen.updated(k, nMissing),
ents ++ newEnts,
if (nMissing) k :: missing else missing,
newUtes ++ utes)
} getOrElse {
(seen updated (k, true), ents, k :: missing, utes)
(seen.updated(k, true), ents, k :: missing, utes)
}
}

View File

@ -3,8 +3,6 @@
package com.daml.codegen.dependencygraph
import scala.language.higherKinds
import com.daml.lf.iface.DefDataType
import scalaz.{Applicative, Traverse}

View File

@ -12,7 +12,6 @@ import com.daml.lf.data.Ref
import com.typesafe.scalalogging.Logger
import scalaz.{-\/, \/, \/-}
import scala.collection.breakOut
import scala.reflect.runtime.{universe => runUni}
/**
@ -432,11 +431,10 @@ object DamlDataTypeGen {
q"""val ${TermName(s"ev$s")}: $domainApiAlias.encoding.LfEncodable[${TypeName(s)}]""")
val viewsByName: Map[String, TermName] =
fields.zipWithIndex.map { case ((f, _), ix) => f -> TermName(s"view $ix") }(breakOut)
fields.zipWithIndex.view.map { case ((f, _), ix) => f -> TermName(s"view $ix") }.toMap
val recordFieldsByName: Map[String, TermName] =
fields.zipWithIndex.map { case ((f, _), ix) => f -> TermName(s"recordFields $ix") }(
breakOut)
fields.zipWithIndex.view.map { case ((f, _), ix) => f -> TermName(s"recordFields $ix") }.toMap
q"""
implicit def $lfEncodableName[..$typeParams](implicit ..$typeParamEvidences): $domainApiAlias.encoding.LfEncodable[$appliedValueType] =

View File

@ -317,11 +317,11 @@ object LFUtil {
extends Product
with Serializable {
def map[B](f: A => B): TupleNesting[B] =
TupleNesting(run map (_ bimap (f, _ map f)))
TupleNesting(run map (_.bimap(f, _ map f)))
// not tail recursive
def fold[Z](pure: A => Z)(roll: NonEmptyList[Z] => Z): Z =
roll(run map (_ fold (pure, _.fold(pure)(roll))))
roll(run map (_.fold(pure, _.fold(pure)(roll))))
}
/** Group `flat` into the shallowest permissible tree, given that a maximum of

View File

@ -8,7 +8,6 @@ import java.io.File
import com.daml.codegen.Util
import com.daml.lf.data.Ref._
import scala.collection.breakOut
import scala.reflect.runtime.universe._
/** Record and variant source files all refer to this file so that
@ -21,7 +20,7 @@ object PackageIDsGen {
val imports: Seq[Tree] = Seq()
val packageIdsByModule: Map[ModuleName, PackageId] =
util.iface.typeDecls.keys.map(id => (id.qualifiedName.module, id.packageId))(breakOut)
util.iface.typeDecls.keys.view.map(id => (id.qualifiedName.module, id.packageId)).toMap
val packageIdBindings = packageIdsByModule.toSeq.sortBy(_._1.dottedName) map {
case (mn, pid) =>
q"val ${TermName(mn.dottedName)}: _root_.scala.Predef.String = $pid"

View File

@ -8,7 +8,6 @@ import com.daml.lf.data.ImmArray.ImmArraySeq
import com.daml.lf.data.Ref
import com.daml.lf.iface.{DataType, DefDataType}
import scala.language.higherKinds
import scalaz.{Apply, Comonad, Traverse1}
import scalaz.syntax.functor._

View File

@ -3,8 +3,6 @@
package com.daml.codegen.types
import scala.language.higherKinds
import scalaz.{Applicative, Comonad, Order, Traverse, ==>>}
import scalaz.std.tuple._
import scalaz.syntax.apply._
@ -48,7 +46,7 @@ object Namespace {
/** Build a tree from name elements K; the root element is the empty
* name. Invariant: no duplicate List[K]s.
*/
def fromHierarchy[K: Order, V](elts: Traversable[(List[K], V)]): Namespace[K, Option[V]] = {
def fromHierarchy[K: Order, V](elts: Iterable[(List[K], V)]): Namespace[K, Option[V]] = {
val (subs, here) = elts partition (_._1.nonEmpty)
Namespace(
here.headOption map (_._2),

View File

@ -18,7 +18,7 @@ class GraphSpec extends AnyFlatSpec with Matchers {
private[this] def orderedDependencies[K, A](
nodes: Iterable[(K, BaseNode[K, A])]): OrderedDependencies[K, A] =
cyclicDependencies(Traversable.empty, nodes)
cyclicDependencies(Iterable.empty, nodes)
behavior of "Graph.cyclicDependencies"
@ -55,7 +55,7 @@ class GraphSpec extends AnyFlatSpec with Matchers {
val node2 = intNode(2)
val node3 = intNode(3, List(1))
val result = orderedDependencies(Seq(node1, node2, node3)).deps
result should contain inOrder (node1, node3)
result should contain.inOrder(node1, node3)
result should contain(node2)
}
@ -72,11 +72,11 @@ class GraphSpec extends AnyFlatSpec with Matchers {
val node5 = intNode(5)
val result = orderedDependencies(Seq(node1, node2, node3, node4, node5)).deps
result should contain(node5)
result should contain inOrder (node3, node2)
result should contain inOrder (node4, node2)
result should contain inOrder (node4, node3)
result should contain inOrder (node1, node3)
result should contain inOrder (node5, node4)
result should contain.inOrder(node3, node2)
result should contain.inOrder(node4, node2)
result should contain.inOrder(node4, node3)
result should contain.inOrder(node1, node3)
result should contain.inOrder(node5, node4)
}
it should "return error for each unknown dependency" in {

View File

@ -15,6 +15,8 @@ import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import scala.collection.compat._
class UtilTest extends UtilTestHelpers with ScalaCheckDrivenPropertyChecks {
val packageInterface =
@ -49,7 +51,7 @@ class UtilTest extends UtilTestHelpers with ScalaCheckDrivenPropertyChecks {
"partitionEithers" should "equal scalaz separate in simple cases" in forAll {
iis: List[Either[Int, Int]] =>
import scalaz.syntax.monadPlus._, scalaz.std.list._, scalaz.std.either._
Util.partitionEithers(iis) shouldBe iis.separate
iis.partitionMap(identity) shouldBe iis.separate
}
}

View File

@ -100,7 +100,7 @@ class LFUtilSpec extends AnyWordSpec with Matchers with Inside with ScalaCheckPr
case TupleNestingCall(r, s, f) =>
def visit(nesting: TupleNesting[Int], max: Int): Unit = {
nesting.run.size should be <= max
nesting.run.foreach(_ fold (_ => (), visit(_, s)))
nesting.run.foreach(_.fold(_ => (), visit(_, s)))
}
visit(tupleNesting(f, r, s), r)
}

View File

@ -23,13 +23,15 @@ da_scala_library(
da_scala_library(
name = "ledger-resources-test-lib",
srcs = glob(["src/test/lib/scala/**/*.scala"]),
scala_deps = [
"@maven//:org_scalactic_scalactic",
"@maven//:org_scalatest_scalatest",
],
tags = ["maven_coordinates=com.daml:ledger-resources-test-lib:__VERSION__"],
visibility = [
"//visibility:public",
],
deps = [
":ledger-resources",
"@maven//:org_scalactic_scalactic_2_12",
"@maven//:org_scalatest_scalatest_2_12",
],
)

View File

@ -126,10 +126,19 @@ da_scala_library(
da_scala_library(
name = "participant-integration-api-tests-lib",
srcs = glob(["src/test/lib/**/*.scala"]),
scala_deps = [
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:org_scalacheck_scalacheck",
"@maven//:org_scalactic_scalactic",
"@maven//:org_scalatest_scalatest",
],
scala_runtime_deps = [
"@maven//:com_typesafe_akka_akka_slf4j",
],
visibility = ["//visibility:public"],
runtime_deps = [
"@maven//:com_h2database_h2",
"@maven//:com_typesafe_akka_akka_slf4j_2_12",
"@maven//:org_postgresql_postgresql",
],
deps = [
@ -156,16 +165,11 @@ da_scala_library(
"//libs-scala/postgresql-testing",
"//libs-scala/resources",
"//libs-scala/timer-utils",
"@maven//:com_typesafe_akka_akka_actor_2_12",
"@maven//:com_typesafe_akka_akka_stream_2_12",
"@maven//:io_dropwizard_metrics_metrics_core",
"@maven//:io_grpc_grpc_netty",
"@maven//:io_netty_netty_common",
"@maven//:io_netty_netty_handler",
"@maven//:io_netty_netty_transport",
"@maven//:org_scalacheck_scalacheck_2_12",
"@maven//:org_scalactic_scalactic_2_12",
"@maven//:org_scalatest_scalatest_2_12",
],
)

View File

@ -30,7 +30,7 @@ private[dao] trait JdbcLedgerDaoPackagesSpec {
secondUploadResult <- storePackageEntry(
offset2,
packages.map(a => a._1 -> a._2.copy(sourceDescription = Some(secondDescription))))
loadedPackages <- ledgerDao.listLfPackages
loadedPackages <- ledgerDao.listLfPackages()
} yield {
firstUploadResult shouldBe PersistenceResponse.Ok
secondUploadResult shouldBe PersistenceResponse.Ok

View File

@ -38,7 +38,7 @@ private[dao] trait JdbcLedgerDaoPartiesSpec {
_ = response should be(PersistenceResponse.Ok)
parties <- ledgerDao.listKnownParties()
} yield {
parties should contain allOf (alice, bob)
parties should contain.allOf(alice, bob)
}
}
@ -90,7 +90,7 @@ private[dao] trait JdbcLedgerDaoPartiesSpec {
_ = response should be(PersistenceResponse.Ok)
parties <- ledgerDao.getParties(Seq(danParty, eveParty, nonExistentParty))
} yield {
parties should contain only (dan, eve)
parties should contain.only(dan, eve)
}
}

View File

@ -51,7 +51,7 @@ private[dao] trait JdbcLedgerDaoPostCommitValidationSpec extends LoneElement {
to <- ledgerDao.lookupLedgerEnd()
completions <- getCompletions(from, to, defaultAppId, Set(alice))
} yield {
completions should contain allOf (
completions should contain.allOf(
originalAttempt.commandId.get -> ok,
duplicateAttempt.commandId.get -> invalid,
)
@ -71,7 +71,7 @@ private[dao] trait JdbcLedgerDaoPostCommitValidationSpec extends LoneElement {
to <- ledgerDao.lookupLedgerEnd()
completions <- getCompletions(from, to, defaultAppId, Set(alice))
} yield {
completions should contain allOf (
completions should contain.allOf(
create.commandId.get -> ok,
lookup.commandId.get -> invalid,
)
@ -93,7 +93,7 @@ private[dao] trait JdbcLedgerDaoPostCommitValidationSpec extends LoneElement {
to <- ledgerDao.lookupLedgerEnd()
completions <- getCompletions(from, to, defaultAppId, Set(alice))
} yield {
completions should contain allOf (
completions should contain.allOf(
create.commandId.get -> ok,
archive.commandId.get -> ok,
lookup.commandId.get -> invalid,
@ -116,7 +116,7 @@ private[dao] trait JdbcLedgerDaoPostCommitValidationSpec extends LoneElement {
to <- ledgerDao.lookupLedgerEnd()
completions <- getCompletions(from, to, defaultAppId, Set(alice))
} yield {
completions should contain allOf (
completions should contain.allOf(
create.commandId.get -> ok,
archive.commandId.get -> ok,
fetch.commandId.get -> invalid,
@ -139,7 +139,7 @@ private[dao] trait JdbcLedgerDaoPostCommitValidationSpec extends LoneElement {
to <- ledgerDao.lookupLedgerEnd()
completions <- getCompletions(from, to, defaultAppId, Set(alice))
} yield {
completions should contain allOf (
completions should contain.allOf(
fetch1.commandId.get -> invalid,
divulgence.commandId.get -> ok,
fetch2.commandId.get -> ok,

View File

@ -740,8 +740,6 @@ object JdbcLedgerDaoSuite {
import scalaz.syntax.traverse._
import scalaz.{Free, Monad, NaturalTransformation, Traverse}
import scala.language.higherKinds
implicit final class `TraverseFM Ops`[T[_], A](private val self: T[A]) extends AnyVal {
/** Like `traverse`, but guarantees that

View File

@ -20,6 +20,7 @@ import org.scalatest._
import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.collection.compat._
import scala.concurrent.Future
private[dao] trait JdbcLedgerDaoTransactionTreesSpec
@ -282,6 +283,7 @@ private[dao] trait JdbcLedgerDaoTransactionTreesSpec
// Ensure two sequences of transaction trees are comparable:
// - witnesses do not have to appear in a specific order
private def comparable(txs: Seq[TransactionTree]): Seq[TransactionTree] =
txs.map(tx => tx.copy(eventsById = tx.eventsById.mapValues(_.modifyWitnessParties(_.sorted))))
txs.map(tx =>
tx.copy(eventsById = tx.eventsById.view.mapValues(_.modifyWitnessParties(_.sorted)).toMap))
}

View File

@ -504,9 +504,9 @@ private[dao] trait JdbcLedgerDaoTransactionsSpec extends OptionValues with Insid
val beginOffset = nextOffset()
val commandsWithOffsetGaps: Vector[(Offset, LedgerEntry.Transaction)] =
Vector(singleCreate) ++ offsetGap ++
Vector.fill(2)(singleCreate) ++ offsetGap ++
Vector.fill(3)(singleCreate) ++ offsetGap ++ offsetGap ++
Vector(singleCreate) ++ offsetGap() ++
Vector.fill(2)(singleCreate) ++ offsetGap() ++
Vector.fill(3)(singleCreate) ++ offsetGap() ++ offsetGap() ++
Vector.fill(5)(singleCreate)
val endOffset = nextOffset()

View File

@ -29,7 +29,7 @@ private[dao] trait JdbcLedgerDaoTransactionsWriterSpec extends LoneElement {
to <- ledgerDao.lookupLedgerEnd()
completions <- getCompletions(from, to, defaultAppId, Set(alice))
} yield {
completions should contain allOf (
completions should contain.allOf(
create.commandId.get -> ok,
lookup.commandId.get -> ok,
)
@ -47,7 +47,7 @@ private[dao] trait JdbcLedgerDaoTransactionsWriterSpec extends LoneElement {
to <- ledgerDao.lookupLedgerEnd()
completions <- getCompletions(from, to, defaultAppId, Set(alice))
} yield {
completions should contain allOf (
completions should contain.allOf(
create.commandId.get -> ok,
fetch.commandId.get -> ok,
)

View File

@ -112,10 +112,21 @@ genrule(
da_scala_library(
name = "sandbox-classic-scala-tests-lib",
srcs = glob(["src/test/lib/**/*.scala"]),
scala_deps = [
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:org_playframework_anorm_anorm",
"@maven//:org_playframework_anorm_anorm_tokenizer",
"@maven//:org_scalactic_scalactic",
"@maven//:org_scalatest_scalatest",
"@maven//:org_scalaz_scalaz_core",
],
scala_runtime_deps = [
"@maven//:com_typesafe_akka_akka_slf4j",
],
visibility = ["//visibility:public"],
runtime_deps = [
"@maven//:com_h2database_h2",
"@maven//:com_typesafe_akka_akka_slf4j_2_12",
"@maven//:org_postgresql_postgresql",
],
deps = [
@ -149,14 +160,7 @@ da_scala_library(
"//libs-scala/resources-akka",
"//libs-scala/timer-utils",
"@maven//:ch_qos_logback_logback_classic",
"@maven//:com_typesafe_akka_akka_actor_2_12",
"@maven//:com_typesafe_akka_akka_stream_2_12",
"@maven//:io_dropwizard_metrics_metrics_core",
"@maven//:org_playframework_anorm_anorm_2_12",
"@maven//:org_playframework_anorm_anorm_tokenizer_2_12",
"@maven//:org_scalactic_scalactic_2_12",
"@maven//:org_scalatest_scalatest_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
"@maven//:org_slf4j_slf4j_api",
],
)

View File

@ -67,10 +67,20 @@ da_scala_library(
name = "sandbox-common-scala-tests-lib",
srcs = glob(["src/test/lib/**/*.scala"]),
resources = glob(["src/test/resources/**/*"]),
scala_deps = [
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:com_typesafe_scala_logging_scala_logging",
"@maven//:org_scalactic_scalactic",
"@maven//:org_scalatest_scalatest",
"@maven//:org_scalaz_scalaz_core",
],
scala_runtime_deps = [
"@maven//:com_typesafe_akka_akka_slf4j",
],
visibility = ["//visibility:public"],
runtime_deps = [
"@maven//:com_h2database_h2",
"@maven//:com_typesafe_akka_akka_slf4j_2_12",
"@maven//:org_postgresql_postgresql",
],
deps = [
@ -101,13 +111,7 @@ da_scala_library(
"//libs-scala/timer-utils",
"@maven//:ch_qos_logback_logback_classic",
"@maven//:com_auth0_java_jwt",
"@maven//:com_typesafe_akka_akka_actor_2_12",
"@maven//:com_typesafe_akka_akka_stream_2_12",
"@maven//:com_typesafe_scala_logging_scala_logging_2_12",
"@maven//:io_netty_netty_handler",
"@maven//:org_scalactic_scalactic_2_12",
"@maven//:org_scalatest_scalatest_2_12",
"@maven//:org_scalaz_scalaz_core_2_12",
"@maven//:org_slf4j_slf4j_api",
],
)

View File

@ -21,7 +21,7 @@ import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
abstract class CommonCliSpecBase(
protected val cli: SandboxCli,

View File

@ -144,7 +144,7 @@ abstract class ResetServiceITBase
.stub(channel)
.getActiveContracts(GetActiveContractsRequest(ledgerId.unwrap, Some(f)), _))
.all()
.map(_.flatMap(_.activeContracts)(collection.breakOut))
.map(_.view.flatMap(_.activeContracts).toSet)
protected def listPackages(ledgerId: LedgerId): Future[Seq[String]] =
PackageServiceGrpc

View File

@ -29,7 +29,7 @@ object WaitForCompletionsObserver {
final class WaitForCompletionsObserver private (expectedCompletions: Int)
extends StreamObserver[CompletionStreamResponse] {
private val promise = Promise[Unit]
private val promise = Promise[Unit]()
private val counter = new AtomicInteger(0)
val result: Future[Unit] = promise.future

View File

@ -13,9 +13,9 @@ object WithTimeout {
private[this] val timer = new Timer("timeout-timer", true)
def apply[A](t: Duration)(f: => Future[A]): Future[A] = {
val p = Promise[A]
val p = Promise[A]()
timer.schedule(new TimeoutTask(p), t.toMillis)
p.tryCompleteWith(f).future
p.completeWith(f).future
}
}