use .dars consistently in tests (#484)
This commit is contained in: parent d9b35ad1ab, commit f50cbef297

3rdparty/dependencies.digest (vendored)
@@ -1 +1 @@
-58a4d200186ea6d9d18574e7b0d4ec57f0b45c7a dependencies.yaml
+066ab1d8914325c016da01ddaef4b881e3b7c83e dependencies.yaml
build.sh
@@ -27,8 +27,11 @@ bazel test -j 200 //... --experimental_execution_log_file "$EXEC_LOG_DIR/test_ex
 bazel query 'deps(//...)' > /dev/null
 # Execute Sandbox performance tests if on master
 # On Jenkins we never run them as BUILD_SOURCEBRANCHNAME isn’t set.
+# moreover, pass -foe true to make the benchmark fail with a proper
+# exit code if things go wrong, rather than pretend that everything is
+# fine.
 if [[ "${BUILD_SOURCEBRANCHNAME:-master}" = master ]]; then
-bazel run -- //ledger/sandbox-perf -i1 -f1 -wi 1 -bm avgt -rf json -rff sandbox-perf.json # 1 warmup, 1 iterations in 1 fork
+bazel run -- //ledger/sandbox-perf -foe true -i1 -f1 -wi 1 -bm avgt -rf csv -rff sandbox-perf.csv # 1 warmup, 1 iterations in 1 fork
 fi

 # Check that we can load damlc in ghci
@@ -27,14 +27,15 @@ class UniversalArchiveReader[A](
 parseDalf: InputStream => Try[A]) {
 import SupportedFileType._

-def readArchive(file: File): Try[Dar[A]] = supportedFileType(file).flatMap {
+def readFile(file: File): Try[Dar[A]] = supportedFileType(file).flatMap {
 case DarFile =>
 bracket(zipFile(file))(close).flatMap(parseDar)
 case DalfFile =>
 bracket(inputStream(file))(close).flatMap(parseDalf).map(Dar(_, List.empty))
 }

-private def zipFile(f: File): Try[ZipFile] = Try(new ZipFile(f))
+private def zipFile(f: File): Try[ZipFile] =
+Try(new ZipFile(f))

 private def inputStream(f: File): Try[InputStream] =
 Try(new BufferedInputStream(new FileInputStream(f)))
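
A minimal usage sketch of the renamed readFile entry point, assuming only the UniversalArchiveReader and Decode signatures shown in this diff; the object name and .dar path below are illustrative, not part of the commit:

    import java.io.File
    import com.digitalasset.daml.lf.UniversalArchiveReader
    import com.digitalasset.daml.lf.lfpackage.Decode

    object ReadDarSketch {
      def main(args: Array[String]): Unit = {
        // readFile dispatches on the extension: a .dar archive or a single .dalf both yield a Dar[_].
        val dar = UniversalArchiveReader().readFile(new File("daml-lf/tests/BasicTests.dar")).get
        // Decode every contained archive into (PackageId, Package), as the tests in this commit do.
        val packages = Map(dar.all.map {
          case (pkgId, archive) => Decode.readArchivePayloadAndVersion(pkgId, archive)._1
        }: _*)
        println(s"main package: ${dar.main._1}, ${packages.size} package(s) in total")
      }
    }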
@@ -18,19 +18,19 @@ class UniversalArchiveReaderSpec extends FlatSpec with Matchers with Inside {
 behavior of classOf[UniversalArchiveReader[_]].getSimpleName

 it should "parse a DAR file" in {
-assertSuccess(UniversalArchiveReader().readArchive(darFile))
+assertSuccess(UniversalArchiveReader().readFile(darFile))
 }

 it should "parse a DALF file" in {
-assertSuccess(UniversalArchiveReader().readArchive(dalfFile))
+assertSuccess(UniversalArchiveReader().readFile(dalfFile))
 }

 it should "parse a DAR file and return language version" in {
-assertSuccess(UniversalArchiveReaderWithVersion().readArchive(darFile))
+assertSuccess(UniversalArchiveReaderWithVersion().readFile(darFile))
 }

 it should "parse a DALF file and return language version" in {
-assertSuccess(UniversalArchiveReaderWithVersion().readArchive(dalfFile))
+assertSuccess(UniversalArchiveReaderWithVersion().readFile(dalfFile))
 }

 private def assertSuccess[A](value: Try[Dar[A]]): Unit = {
@@ -37,11 +37,10 @@ da_scala_test(
 name = "tests",
 timeout = "moderate",
 srcs = glob(["src/test/**/*.scala"]),
-resources = [
-"//daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf",
-"//daml-lf/tests:BasicTests.dalf",
-"//daml-lf/tests:LargeTransaction.dalf",
-"//daml-lf/tests:Optional.dalf",
+data = [
+"//daml-lf/tests:BasicTests.dar",
+"//daml-lf/tests:LargeTransaction.dar",
+"//daml-lf/tests:Optional.dar",
 ],
 scalacopts = lf_scalacopts,
 deps = [
@@ -3,6 +3,8 @@

 package com.digitalasset.daml.lf.engine

+import java.io.File
+
 import com.digitalasset.daml.lf.data.Ref._
 import com.digitalasset.daml.lf.data.{FrontStack, ImmArray, Ref, Time}
 import com.digitalasset.daml.lf.lfpackage.Ast._
@@ -12,9 +14,11 @@ import com.digitalasset.daml.lf.transaction.Node._
 import com.digitalasset.daml.lf.transaction.{GenTransaction => GenTx, Transaction => Tx}
 import com.digitalasset.daml.lf.value.Value
 import Value._
+import com.digitalasset.daml.lf.UniversalArchiveReader
 import com.digitalasset.daml.lf.value.ValueVersions.assertAsVersionedValue
 import org.scalatest.{Matchers, WordSpec}
-import scalaz.std.either._, scalaz.syntax.apply._ // above
+import scalaz.std.either._
+import scalaz.syntax.apply._

 import scala.language.implicitConversions

@@ -33,20 +37,19 @@ class EngineTest extends WordSpec with Matchers {
 private val clara = SimpleString.assertFromString("Clara")
 private val party = SimpleString.assertFromString("Party")

-private val loader = Thread.currentThread().getContextClassLoader()
-
-private def loadPackage(resource: String): (PackageId, Package) = {
-
-if (loader.getResource(resource) == null) {
-sys.error(s"Could not find resource $resource")
-}
-Decode.decodeArchiveFromInputStream(loader.getResourceAsStream(resource))
+private def loadPackage(resource: String): (PackageId, Package, Map[PackageId, Package]) = {
+val packages =
+UniversalArchiveReader().readFile(new File(resource)).get
+val packagesMap = Map(packages.all.map {
+case (pkgId, pkgArchive) => Decode.readArchivePayloadAndVersion(pkgId, pkgArchive)._1
+}: _*)
+val (mainPkgId, mainPkgArchive) = packages.main
+val mainPkg = Decode.readArchivePayloadAndVersion(mainPkgId, mainPkgArchive)._1._2
+(mainPkgId, mainPkg, packagesMap)
 }

-private val (basicTestsPkgId, basicTestsPkg) =
-loadPackage("daml-lf/tests/BasicTests.dalf")
-private val (ghcPrimPkgId, ghcPrimPkg) =
-loadPackage("daml-foundations/daml-ghc/package-database/deprecated/daml-prim-1.3.dalf")
+private val (basicTestsPkgId, basicTestsPkg, allPackages) = loadPackage(
+"daml-lf/tests/BasicTests.dar")

 private[this] def makeAbsoluteContractId(coid: ContractId): AbsoluteContractId =
 coid match {
@@ -90,11 +93,7 @@ class EngineTest extends WordSpec with Matchers {
 }

 def lookupPackage(pkgId: PackageId): Option[Package] = {
-pkgId match {
-case `basicTestsPkgId` => Some(basicTestsPkg)
-case `ghcPrimPkgId` => Some(ghcPrimPkg)
-case _ => None
-}
+allPackages.get(pkgId)
 }

 def lookupKey(@deprecated("", "") key: GlobalKey): Option[AbsoluteContractId] =
@@ -215,8 +214,8 @@ class EngineTest extends WordSpec with Matchers {
 }

 "translate Optional values" in {
-val (optionalPkgId, optionalPkg) =
-loadPackage("daml-lf/tests/Optional.dalf")
+val (optionalPkgId, optionalPkg @ _, allOptionalPackages) =
+loadPackage("daml-lf/tests/Optional.dar")

 val translator = CommandTranslation(ConcurrentCompiledPackages.apply())

@@ -229,26 +228,16 @@ class EngineTest extends WordSpec with Matchers {
 )
 val typ = TTyConApp(id, ImmArray.empty)

-def lookupOptionalPackage(pkgId: PackageId): Option[Package] = {
-if (pkgId == optionalPkgId) {
-Some(optionalPkg)
-} else if (pkgId == ghcPrimPkgId) {
-Some(ghcPrimPkg)
-} else {
-None
-}
-}
-
 translator
 .translateValue(typ, someValue)
-.consume(lookupContract, lookupOptionalPackage, lookupKey) shouldBe
+.consume(lookupContract, allOptionalPackages.get, lookupKey) shouldBe
 Right(
 ERecCon(
 TypeConApp(id, ImmArray.empty),
 ImmArray("recField" -> ESome(TBuiltin(BTText), EPrimLit(PLText("foo"))))))
 translator
 .translateValue(typ, noneValue)
-.consume(lookupContract, lookupOptionalPackage, lookupKey) shouldBe
+.consume(lookupContract, allOptionalPackages.get, lookupKey) shouldBe
 Right(
 ERecCon(TypeConApp(id, ImmArray.empty), ImmArray("recField" -> ENone(TBuiltin(BTText)))))
 }
@@ -3,13 +3,17 @@

 package com.digitalasset.daml.lf.engine

-import com.digitalasset.daml.lf.data.Ref.{Identifier, PackageId, SimpleString, QualifiedName}
+import java.io.File
+
+import com.digitalasset.daml.lf.data.Ref.{Identifier, PackageId, QualifiedName, SimpleString}
 import com.digitalasset.daml.lf.data.{FrontStack, ImmArray, Time}
 import com.digitalasset.daml.lf.lfpackage.{Ast, Decode}
 import com.digitalasset.daml.lf.transaction.Transaction.Transaction
 import com.digitalasset.daml.lf.transaction.{Node => N, Transaction => Tx}
 import com.digitalasset.daml.lf.value.Value
 import Value._
+import com.digitalasset.daml.lf.UniversalArchiveReader
+import com.digitalasset.daml.lf.lfpackage.Ast.Package
 import com.digitalasset.daml.lf.value.ValueVersions.assertAsVersionedValue
 import org.scalameter
 import org.scalameter.Quantity
@@ -18,24 +22,23 @@ import org.scalatest.{Assertion, Matchers, WordSpec}
 @SuppressWarnings(Array("org.wartremover.warts.Any"))
 class LargeTransactionTest extends WordSpec with Matchers {

-private[this] val largeTransactionId = "daml-lf/tests/LargeTransaction.dalf"
-private[this] val ghcPrimId =
-"daml-foundations/daml-ghc/package-database/deprecated/daml-prim-1.3.dalf"
+private def loadPackage(resource: String): (PackageId, Package, Map[PackageId, Package]) = {
+val packages =
+UniversalArchiveReader().readFile(new File(resource)).get
+val packagesMap = Map(packages.all.map {
+case (pkgId, pkgArchive) => Decode.readArchivePayloadAndVersion(pkgId, pkgArchive)._1
+}: _*)
+val (mainPkgId, mainPkgArchive) = packages.main
+val mainPkg = Decode.readArchivePayloadAndVersion(mainPkgId, mainPkgArchive)._1._2
+(mainPkgId, mainPkg, packagesMap)
+}

-private[this] val largeTx: (PackageId, Ast.Package) = loadArchiveAsResource(largeTransactionId)
-private[this] val ghcPrim: (PackageId, Ast.Package) =
-loadArchiveAsResource(ghcPrimId)
-private[this] val allPackages: Map[PackageId, Ast.Package] = List(largeTx, ghcPrim).toMap
+private[this] val (largeTxId, largeTxPkg, allPackages) = loadPackage(
+"daml-lf/tests/LargeTransaction.dar")
+private[this] val largeTx = (largeTxId, largeTxPkg)

 private[this] val party = SimpleString.assertFromString("party")

-private def loadArchiveAsResource(resourceId: String): (PackageId, Ast.Package) = {
-val loader = this.getClass.getClassLoader
-Option(loader.getResourceAsStream(resourceId))
-.map(Decode.decodeArchiveFromInputStream)
-.getOrElse(sys.error(s"Could not find resource: $resourceId"))
-}
-
 private def lookupPackage(pkgId: PackageId): Option[Ast.Package] = allPackages.get(pkgId)

 private def report(name: String, quantity: Quantity[Double]): Unit =
@@ -14,10 +14,10 @@ import com.digitalasset.daml.lf.speedy.SResult._
 import com.digitalasset.daml.lf.types.Ledger
 import com.digitalasset.daml.lf.value.Value
 import Value._
-import java.io.{FileInputStream, PrintWriter, StringWriter}
+import java.io.{File, PrintWriter, StringWriter}
 import java.nio.file.{Path, Paths}

-import com.digitalasset.daml.lf.PureCompiledPackages
+import com.digitalasset.daml.lf.{PureCompiledPackages, UniversalArchiveReader}
 import com.digitalasset.daml.lf.validation.Validation
 import org.jline.builtins.Completers
 import org.jline.reader.{History, LineReader, LineReaderBuilder}
@@ -33,24 +33,24 @@ object Main extends App {
 def usage(): Unit = {
 println(
 """
-|usage: daml-lf-speedy [--decode-lfdev] COMMAND ARGS...
+|usage: daml-lf-speedy COMMAND ARGS...
 |
 |commands:
-| repl [file ...] Run the interactive repl. Load the given packages if any.
-| test <name> [file ...] Load given packages and run the named scenario with verbose output.
-| testAll [file ...] Load the given packages and run all scenarios.
-| validate [file ...] Load the given packages and validate them.
-| [file ...] Same as 'repl' when all given files exist.
+| repl [file] Run the interactive repl. Load the given packages if any.
+| test <name> [file] Load given packages and run the named scenario with verbose output.
+| testAll [file] Load the given packages and run all scenarios.
+| validate [file] Load the given packages and validate them.
+| [file] Same as 'repl' when all given files exist.
 """.stripMargin)

 }

-def defaultCommand(allowDev: Boolean, possibleFiles: List[String]): Unit = {
-if (possibleFiles.exists(Paths.get(_).toFile.isFile == false)) {
+def defaultCommand(possibleFile: String): Unit = {
+if (!Paths.get(possibleFile).toFile.isFile) {
 usage()
 System.exit(1)
 } else
-Repl.repl(allowDev, possibleFiles)
+Repl.repl(possibleFile)
 }

 if (args.isEmpty) {
@@ -68,15 +68,18 @@ object Main extends App {
 replArgs match {
 case "-h" :: _ => usage()
 case "--help" :: _ => usage()
-case "repl" :: files => Repl.repl(allowDev, files)
-case "testAll" :: files =>
-if (!Repl.testAll(allowDev, files)._1) System.exit(1)
-case "test" :: id :: files =>
-if (!Repl.test(allowDev, id, files)._1) System.exit(1)
-case "validate" :: files =>
-if (!Repl.validate(allowDev, files)._1) System.exit(1)
-case possibleFiles =>
-defaultCommand(allowDev, possibleFiles)
+case List("repl", file) => Repl.repl(file)
+case List("testAll", file) =>
+if (!Repl.testAll(allowDev, file)._1) System.exit(1)
+case List("test", id, file) =>
+if (!Repl.test(allowDev, id, file)._1) System.exit(1)
+case List("validate", file) =>
+if (!Repl.validate(allowDev, file)._1) System.exit(1)
+case List(possibleFile) =>
+defaultCommand(possibleFile)
+case _ =>
+usage()
+System.exit(1)
 }
 }
 }
@@ -88,8 +91,8 @@ object Main extends App {
 ))
 object Repl {

-def repl(allowDev: Boolean): Unit = repl(initialState(allowDev))
-def repl(allowDev: Boolean, files: List[String]): Unit = repl(load(initialState(allowDev), files))
+def repl(): Unit = repl(initialState())
+def repl(darFile: String): Unit = repl(load(darFile))
 def repl(state0: State): Unit = {
 var state = state0
 state.history.load
@@ -105,20 +108,20 @@ object Repl {
 state.history.save
 }

-def testAll(allowDev: Boolean, files: List[String]): (Boolean, State) = {
-val state = load(initialState(allowDev), files)
+def testAll(allowDev: Boolean, file: String): (Boolean, State) = {
+val state = load(file)
 cmdValidate(state)
 cmdTestAll(state)
 }

-def test(allowDev: Boolean, id: String, files: List[String]): (Boolean, State) = {
-val state = load(initialState(allowDev), files)
+def test(allowDev: Boolean, id: String, file: String): (Boolean, State) = {
+val state = load(file)
 cmdValidate(state)
 invokeScenario(state, Seq(id))
 }

-def validate(allowDev: Boolean, files: List[String]): (Boolean, State) = {
-val state = load(initialState(allowDev), files)
+def validate(allowDev: Boolean, file: String): (Boolean, State) = {
+val state = load(file)
 cmdValidate(state)
 }

@@ -135,7 +138,6 @@ object Repl {
 // --------------------------------------------------------

 case class State(
-allowDev: Boolean,
 packages: Map[PackageId, Package],
 packageFiles: Seq[String],
 scenarioRunner: ScenarioRunnerHelper,
@@ -159,9 +161,7 @@ object Repl {

 final val commands = ListMap(
 ":help" -> Command("show this help", (s, _) => { usage(); s }),
-":load" -> Command("load packages from one or more DAML-LF files.", load),
-":reset" -> Command("reset the REPL.", (s, _) => initialState(s.allowDev)),
-":reload" -> Command("reload all loaded packages.", (s, _) => reload(s)),
+":reset" -> Command("reset the REPL.", (s, _) => initialState()),
 ":list" -> Command("list loaded packages.", (s, _) => { list(s); s }),
 ":speedy" -> Command("compile given expression to speedy and print it", (s, args) => {
 speedyCompile(s, args); s
@@ -192,10 +192,9 @@ object Repl {
 cmpl
 }

-def initialState(allowDev: Boolean): State =
+def initialState(): State =
 rebuildReader(
 State(
-allowDev = allowDev,
 packages = Map.empty,
 packageFiles = Seq(),
 ScenarioRunnerHelper(Map.empty),
@@ -240,12 +239,6 @@ object Repl {
 }
 }

-def reload(state: State): State = {
-val newState = load(initialState(state.allowDev), state.packageFiles)
-println("Reloaded: " + newState.packageFiles.mkString(", "))
-rebuildReader(newState)
-}
-
 def list(state: State): Unit = {
 state.packages.foreach {
 case (pkgId, pkg) =>
@@ -321,26 +314,25 @@ object Repl {
 }

 // Load DAML-LF packages from a set of files.
-def load(state: State, files: Seq[String]): State = {
+def load(darFile: String): State = {
+val state = initialState()
 try {
-val decode: Decode = if (state.allowDev) {
-Decode.WithDevSupport
-} else {
-Decode
-}
-val newPackages: Seq[(PackageId, Package)] =
-files.map(file => decode.decodeArchiveFromInputStream(new FileInputStream(file)))
-val npkgs = newPackages.size
+val packages =
+UniversalArchiveReader().readFile(new File(darFile)).get
+val packagesMap = Map(packages.all.map {
+case (pkgId, pkgArchive) => Decode.readArchivePayloadAndVersion(pkgId, pkgArchive)._1
+}: _*)
+val (mainPkgId, mainPkgArchive) = packages.main
+val mainPkg = Decode.readArchivePayloadAndVersion(mainPkgId, mainPkgArchive)._1._2
+val npkgs = packagesMap.size
 val ndefs =
-newPackages.flatMap(_._2.modules.values.map(_.definitions.size)).sum
+packagesMap.flatMap(_._2.modules.values.map(_.definitions.size)).sum
 println(s"$ndefs definitions from $npkgs package(s) loaded.")

-val oldAndNew = state.packages ++ newPackages.toMap
 rebuildReader(
 state.copy(
-packages = oldAndNew,
-packageFiles = state.packageFiles ++ files,
-scenarioRunner = ScenarioRunnerHelper(oldAndNew)
+packages = packagesMap,
+scenarioRunner = ScenarioRunnerHelper(packagesMap)
 ))
 } catch {
 case ex: Throwable => {
@@ -33,12 +33,11 @@ da_scala_test(
 size = "large",
 timeout = "long",
 srcs = glob(["src/test/**/*.scala"]),
-resources = [
-"//daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf",
-"//daml-lf/tests:AuthorizedDivulgence.dalf",
-"//daml-lf/tests:BasicTests.dalf",
-"//daml-lf/tests:DontDiscloseNonConsumingChoicesToObservers.dalf",
-"//daml-lf/tests:LargeTransaction.dalf",
+data = [
+"//daml-lf/tests:AuthorizedDivulgence.dar",
+"//daml-lf/tests:BasicTests.dar",
+"//daml-lf/tests:DontDiscloseNonConsumingChoicesToObservers.dar",
+"//daml-lf/tests:LargeTransaction.dar",
 ],
 scalacopts = lf_scalacopts,
 deps = [
@@ -8,6 +8,8 @@ import com.digitalasset.daml.lf.lfpackage.Decode
 import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
 import org.scalatest.time.{Millis, Span}
 import org.scalatest.{Matchers, WordSpec}
+import com.digitalasset.daml.lf.UniversalArchiveReader
+import java.io.File

 import scala.concurrent.ExecutionContext.Implicits.global

@@ -16,31 +18,22 @@ class SemanticTests extends WordSpec with Matchers with ScalaFutures with Integr

 // these files are generated by bazel. Add new tests here.

-val dalfFiles = List(
-"daml-lf/tests/BasicTests.dalf",
-"daml-lf/tests/AuthorizedDivulgence.dalf",
-"daml-lf/tests/DontDiscloseNonConsumingChoicesToObservers.dalf",
-"daml-lf/tests/LargeTransaction.dalf",
+val darFiles = List(
+"daml-lf/tests/BasicTests.dar",
+"daml-lf/tests/AuthorizedDivulgence.dar",
+"daml-lf/tests/DontDiscloseNonConsumingChoicesToObservers.dar",
+"daml-lf/tests/LargeTransaction.dar",
 )

-val libraryDalfs = List(
-"daml-foundations/daml-ghc/package-database/deprecated/daml-prim-1.3.dalf"
-)
-
-val testPkgs = dalfFiles.map(file => {
-val stream = loader.getResourceAsStream(file)
-(
-file.stripPrefix("daml-lf/tests").stripSuffix(".dalf"),
-Decode.decodeArchiveFromInputStream(stream))
-})
-
-val libraryPkgs = libraryDalfs.map(file => {
-val stream = loader.getResourceAsStream(file)
-(
-file
-.stripPrefix("daml-foundations/daml-ghc/package-database/deprecated")
-.stripSuffix(".dalf"),
-Decode.decodeArchiveFromInputStream(stream))
+val testPackages = darFiles.map(file => {
+val packages =
+UniversalArchiveReader().readFile(new File(file)).get
+val packagesMap = Map(packages.all.map {
+case (pkgId, pkgArchive) => Decode.readArchivePayloadAndVersion(pkgId, pkgArchive)._1
+}: _*)
+val (mainPkgId, mainPkgArchive) = packages.main
+val mainPkg = Decode.readArchivePayloadAndVersion(mainPkgId, mainPkgArchive)._1._2
+(mainPkgId, mainPkg, packagesMap)
 })

 def countTests(pkg: Package): Int = {
@@ -55,15 +48,17 @@ class SemanticTests extends WordSpec with Matchers with ScalaFutures with Integr

 // TODO (drsk): We need to test daml 1.2 packages separate, because we can not mix 1.x packages
 // with 1.2 packages. Merge this with the above tests once we retire 1.x.
-testPkgs.foreach {
-case (name, testPkg) =>
-val testCount = countTests(testPkg._2)
-s"run $testCount scenarios $name in engine" should {
+testPackages.foreach {
+case (pkgId, pkg, allPackages) =>
+val testCount = countTests(pkg)
+s"run $testCount scenarios $pkgId in engine" should {
 "complete" in {
 assert(testCount > 0)
-val pkgs = (testPkg +: libraryPkgs.map(_._2)).toMap
 val semanticTester =
-new SemanticTester(_ => new SemanticTester.EngineLedger(pkgs), testPkg._1, pkgs)
+new SemanticTester(
+_ => new SemanticTester.EngineLedger(allPackages),
+pkgId,
+allPackages)
 // NOTE(JM, SC): default 15 second timeout (assuming that comfortably
 // fits any constant factors), and we broke 30s around 400 market infrastructure
 // scenarios; 50ms/test would give us 35s for 400, 75 gives us 45s,
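
The same dar-loading pattern feeds the engine-backed semantic tester; a hypothetical, self-contained sketch assuming the SemanticTester and EngineLedger constructors used in the hunk above (scenario execution itself is elided):

    import java.io.File
    import com.digitalasset.daml.lf.UniversalArchiveReader
    import com.digitalasset.daml.lf.engine.testing.SemanticTester
    import com.digitalasset.daml.lf.lfpackage.Decode

    object SemanticTesterSketch {
      def main(args: Array[String]): Unit = {
        val dar = UniversalArchiveReader().readFile(new File("daml-lf/tests/BasicTests.dar")).get
        val allPackages = Map(dar.all.map {
          case (pkgId, archive) => Decode.readArchivePayloadAndVersion(pkgId, archive)._1
        }: _*)
        // Wire every package from the .dar into an engine-backed ledger, keyed by the main package id.
        val tester = new SemanticTester(
          _ => new SemanticTester.EngineLedger(allPackages),
          dar.main._1,
          allPackages)
        // SemanticTester.scenarios(allPackages) lists the scenarios that could then be run with `tester`.
      }
    }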
@@ -18,7 +18,6 @@ TEST_FILES = \
 [
 daml_compile(
 name = name,
-damlc = "//daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app-deprecated",
 main_src = "%s.daml" % name,
 target = "1.3",
 visibility = ["//daml-lf:__subpackages__"],
@@ -28,7 +27,6 @@ TEST_FILES = \

 daml_compile(
 name = "Optional",
-damlc = "//daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app-deprecated",
 main_src = "Optional.daml",
 target = "1.3",
 visibility = ["//daml-lf:__subpackages__"],
@@ -42,12 +40,12 @@ daml_compile(
 args = [
 "$(location //daml-lf/repl:repl)",
 "$(location //daml-foundations/daml-tools/da-hs-damlc-app)",
-"$(location :%s.dalf)" % name,
+"$(location :%s.dar)" % name,
 ],
 data = [
 "//daml-foundations/daml-tools/da-hs-damlc-app",
 "//daml-lf/repl",
-":%s.dalf" % name,
+":%s.dar" % name,
 ],
 )
 for name in TEST_FILES
@@ -60,13 +58,11 @@ daml_compile(
 srcs = ["scenario/test.sh"],
 args = [
 "$(location //daml-lf/repl:repl)",
-"$(location //daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app-deprecated)",
+"$(location //daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app)",
 "$(location :%s)" % file,
-"$(location //daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf)",
 ],
 data = [
-"//daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf",
-"//daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app-deprecated",
+"//daml-foundations/daml-tools/da-hs-damlc-app",
 "//daml-lf/repl",
 file,
 "%s/EXPECTED.ledger" % "/".join(file.split("/")[0:3]),
@@ -15,12 +15,12 @@ cleanup() {
 trap cleanup EXIT

 case "${MAIN##*.}" in
-dalf)
+dar)
 $DAML_LF_REPL testAll "$MAIN"
 ;;
 daml)
-$DAMLC export-lf-v1 "$MAIN" -o $TMPDIR/out.dalf
-$DAML_LF_REPL testAll $TMPDIR/out.dalf
+$DAMLC compile "$MAIN" main -o $TMPDIR/out.dar
+$DAML_LF_REPL testAll $TMPDIR/out.dar
 ;;
 *)
 echo "Unknown file extension on $MAIN" 1>&2
@@ -14,15 +14,14 @@ REPL=$1
 DAMLC=$2
 TESTMAIN=$3
 TESTDIR="$(dirname $TESTMAIN)"
-TESTDALF="$TESTDIR/Main.dalf"
+TESTDAR="$TESTDIR/Main.dar"

-GHC_PRIM_DALF=${4:-""}
 TARGET="1.3"

 REGEX_HIDE_HASHES="s,@[a-z0-9]{8},@XXXXXXXX,g"

-$DAMLC compile --debug --target $TARGET $TESTMAIN -o $TESTDALF
+$DAMLC package --debug --target $TARGET $TESTMAIN 'main' -o $TESTDAR

-$REPL test Test:run $TESTDALF $GHC_PRIM_DALF | sed '1d' | sed -E "$REGEX_HIDE_HASHES" > ${TESTDIR}/ACTUAL.ledger
+$REPL test Test:run $TESTDAR | sed '1d' | sed -E "$REGEX_HIDE_HASHES" > ${TESTDIR}/ACTUAL.ledger

 diff ${PWD}/${TESTDIR}/ACTUAL.ledger ${PWD}/${TESTDIR}/EXPECTED.ledger
@@ -41,7 +41,6 @@ compileDependencies = [

 daml_compile(
 name = "RecordsAndVariants",
-damlc = "//daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app-deprecated",
 main_src = "src/test/resources/damls/RecordsAndVariants.daml",
 target = "1.3",
 )
@@ -103,10 +102,9 @@ da_scala_test_suite(
 size = "small",
 srcs = glob(["src/test/suite/**/*.scala"]),
 data = [
-"//daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf",
-"//extractor:PrimitiveTypes.dalf",
-"//extractor:RecordsAndVariants.dalf",
-"//extractor:TransactionExample.dalf",
+"//extractor:PrimitiveTypes.dar",
+"//extractor:RecordsAndVariants.dar",
+"//extractor:TransactionExample.dar",
 ],
 resources = glob(["src/test/resources/**/*"]),
 deps = [
@@ -25,7 +25,7 @@ class BasicPrimitiveTypesSpec
 with Matchers
 with CustomMatchers {

-override protected def darFile = new File("extractor/PrimitiveTypes.dalf")
+override protected def darFile = new File("extractor/PrimitiveTypes.dar")

 override def scenario: Option[String] = Some("PrimitiveTypes:primitives")

@@ -25,7 +25,7 @@ class ListsSpec
 with Matchers
 with CustomMatchers {

-override protected def darFile = new File("extractor/PrimitiveTypes.dalf")
+override protected def darFile = new File("extractor/PrimitiveTypes.dar")

 override def scenario: Option[String] = Some("PrimitiveTypes:lists")

@@ -25,7 +25,7 @@ class MapsSpec
 with Matchers
 with CustomMatchers {

-override protected def darFile = new File("extractor/PrimitiveTypes.dalf")
+override protected def darFile = new File("extractor/PrimitiveTypes.dar")

 override def scenario: Option[String] = Some("PrimitiveTypes:maps")

@@ -25,7 +25,7 @@ class OptionalSpec
 with Matchers
 with CustomMatchers {

-override protected def darFile = new File("extractor/PrimitiveTypes.dalf")
+override protected def darFile = new File("extractor/PrimitiveTypes.dar")

 override def scenario: Option[String] = Some("PrimitiveTypes:optionals")

@@ -25,7 +25,7 @@ class RecordsAndVariantsSpec
 with Matchers
 with CustomMatchers {

-override protected def darFile = new File("extractor/RecordsAndVariants.dalf")
+override protected def darFile = new File("extractor/RecordsAndVariants.dar")

 override def scenario: Option[String] = Some("RecordsAndVariants:suite")

@@ -30,7 +30,7 @@ class TransactionSpec
 with Matchers
 with CustomMatchers {

-override protected def darFile = new File("extractor/TransactionExample.dalf")
+override protected def darFile = new File("extractor/TransactionExample.dar")

 override def scenario: Option[String] = Some("TransactionExample:example")

@@ -61,8 +61,7 @@ da_scala_test_suite(
 name = "ledger-api-integration-tests",
 srcs = glob(["src/test/itsuite/**/*.scala"]),
 data = [
-"//daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf",
-"//ledger/sandbox:Test.dalf",
+"//ledger/sandbox:Test.dar",
 ],
 deps = [
 ":ledger-api-integration-tests-lib",
@@ -71,7 +70,7 @@ da_scala_test_suite(

 # A standalone semantic test runner.
 # Run with:
-# bazel run //ledger/ledger-api-integration-tests:semantic-test-runner -- <dalf>...
+# bazel run //ledger/ledger-api-integration-tests:semantic-test-runner -- <dar>...
 #
 # This is used to test new ledger backend implementations.
 da_scala_binary(
@@ -96,8 +95,7 @@ da_scala_test_suite(
 name = "ledger-api-semantic-tests",
 srcs = glob(["src/test/semanticsuite/**/*.scala"]),
 data = [
-":SemanticTests.dalf",
-"//daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf",
+":SemanticTests.dar",
 ],
 deps = [
 ":ledger-api-integration-tests-lib",
|
||||
[
|
||||
daml_compile(
|
||||
name = name,
|
||||
damlc = "//daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app-deprecated",
|
||||
main_src = "src/test/semanticsuite/daml/%s.daml" % name,
|
||||
target = "1.3",
|
||||
visibility = ["//visibility:public"],
|
||||
|
@ -49,7 +49,7 @@ class PackageServiceIT
|
||||
|
||||
"return it" in allFixtures { context =>
|
||||
client(context.packageService).listPackages() map {
|
||||
_.packageIds.size shouldEqual 2
|
||||
_.packageIds.size shouldEqual 3 // package, stdlib, daml-prim
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -44,7 +44,7 @@ object PlatformApplications {
|
||||
ledgerId.getOrElse(
|
||||
throw new IllegalStateException("Attempted to access ledger ID, but none is configured."))
|
||||
|
||||
def withDarFile(path: Path) = copy(darFiles = List(Config.ghcPrimFileName.toPath, path))
|
||||
def withDarFile(path: Path) = copy(darFiles = List(path))
|
||||
|
||||
def withDarFiles(path: List[Path]) = copy(darFiles = path)
|
||||
|
||||
@@ -66,18 +66,13 @@ object PlatformApplications {
 object Config {
 val defaultLedgerId = "ledger server"

-val defaultDarFile = new File("ledger/sandbox/Test.dalf")
-val ghcPrimFileName = new File(
-"daml-foundations/daml-ghc/package-database/deprecated/daml-prim-1.3.dalf")
+val defaultDarFile = new File("ledger/sandbox/Test.dar")

 val defaultParties = NonEmptyList("party", "Alice", "Bob")
 val defaultTimeProviderType = TimeProviderType.Static

 def defaultWithLedgerId(ledgerId: Option[String]): Config = {
-val ghcPrimUrl = ghcPrimFileName
-val darFiles =
-if (ghcPrimUrl.exists()) List(ghcPrimFileName, defaultDarFile)
-else sys.error(s"daml-prim not found at location $ghcPrimFileName")
+val darFiles = List(defaultDarFile)
 new Config(
 ledgerId,
 darFiles.map(_.toPath),
@@ -3,11 +3,12 @@

 package com.digitalasset.platform.tests.integration.ledger.api

-import java.io.{File, FileInputStream, InputStream}
+import java.io.File

 import akka.Done
 import akka.stream.Materializer
 import akka.stream.scaladsl.{Flow, Sink, Source}
+import com.digitalasset.daml.lf.UniversalArchiveReader
 import com.digitalasset.ledger.api.testing.utils.MockMessages.submitRequest
 import com.digitalasset.ledger.api.v1.command_submission_service.SubmitRequest
 import com.digitalasset.ledger.api.v1.commands.Command.Command.Create
@@ -16,7 +17,6 @@ import com.digitalasset.ledger.api.v1.completion.Completion
 import com.digitalasset.ledger.api.v1.value.Value.Sum
 import com.digitalasset.ledger.api.v1.value.{Identifier, Record, RecordField, Value}
 import com.digitalasset.platform.PlatformApplications
-import com.digitalasset.platform.damllf.PackageParser
 import com.digitalasset.util.Ctx
 import org.scalatest.Matchers

@@ -26,11 +26,7 @@ trait TransactionServiceHelpers extends Matchers {
 lazy val defaultDar: File = PlatformApplications.Config.defaultDarFile

 lazy val parsedPackageId: String = {
-val dar: InputStream = new FileInputStream(defaultDar)
-PackageParser
-.getPackageIdFromDalf(dar)
-.map(_.underlyingString)
-.getOrElse(throw new RuntimeException(s"failed to load DAR package: ${defaultDar}!"))
+UniversalArchiveReader().readFile(defaultDar).get.main._1.underlyingString
 }

 val failingCommandId: String = "asyncFail"
@@ -5,7 +5,8 @@ package com.digitalasset.platform.semantictest

 import java.io.{BufferedInputStream, File, FileInputStream}

-import com.digitalasset.daml.lf.data.Ref.{PackageId}
+import com.digitalasset.daml.lf.UniversalArchiveReader
+import com.digitalasset.daml.lf.data.Ref.PackageId
 import com.digitalasset.daml.lf.engine.testing.SemanticTester
 import com.digitalasset.daml.lf.lfpackage.{Ast, Decode}
 import com.digitalasset.ledger.api.testing.utils.{
@@ -16,8 +17,6 @@ import com.digitalasset.platform.apitesting.MultiLedgerFixture
 import com.digitalasset.platform.services.time.TimeProviderType
 import org.scalatest.{AsyncWordSpec, Matchers}

-import scala.collection.breakOut
-
 class SandboxSemanticTestsLfRunner
 extends AsyncWordSpec
 with Matchers
@@ -25,18 +24,23 @@ class SandboxSemanticTestsLfRunner
 with SuiteResourceManagementAroundAll
 with AkkaBeforeAndAfterAll {

-private val darFile = new File("ledger/ledger-api-integration-tests/SemanticTests.dalf")
+private val darFile = new File("ledger/ledger-api-integration-tests/SemanticTests.dar")

 override protected lazy val config: Config = Config.default
 .withDarFile(darFile.toPath)
 .withTimeProvider(TimeProviderType.StaticAllowBackwards)

-lazy val packages: Map[PackageId, Ast.Package] =
-List(darFile, Config.ghcPrimFileName).map(readPackage)(breakOut)
+lazy val (mainPkgId, packages) = {
+val dar = UniversalArchiveReader().readFile(darFile).get
+val packages = Map(dar.all.map {
+case (pkgId, archive) => Decode.readArchivePayloadAndVersion(pkgId, archive)._1
+}: _*)
+(dar.main._1, packages)
+}

 s"sandbox launched with $darFile" should {
 for {
-(pkgId, names) <- SemanticTester.scenarios(packages)
+(pkgId, names) <- SemanticTester.scenarios(Map(mainPkgId -> packages(mainPkgId))) // we only care about the main pkg
 name <- names
 } {
 s"run scenario: $name" in allFixtures { ledger =>
@@ -55,9 +55,8 @@ da_scala_benchmark_jmh(
 name = "sandbox-perf",
 srcs = glob(["src/perf/benches/**/*.scala"]),
 data = [
-"//daml-foundations/daml-ghc/package-database:daml-prim-1.3.dalf",
-"//ledger/sandbox:Test.dalf",
-"//ledger/sandbox-perf:LargeTransaction.dalf",
+"//ledger/sandbox:Test.dar",
+"//ledger/sandbox-perf:LargeTransaction.dar",
 ],
 visibility = ["//visibility:public"],
 deps = [
@@ -17,7 +17,7 @@ import org.openjdk.jmh.annotations.Benchmark

 class AcsBench extends TestCommands with InfAwait {

-override protected def darFile: File = new File("ledger/sandbox/Test.dalf")
+override protected def darFile: File = new File("ledger/sandbox/Test.dar")

 private def generateCommand(
 sequenceNumber: Int,
@@ -10,7 +10,7 @@ import org.openjdk.jmh.annotations.Benchmark

 class SimpleBench extends DummyCommands with InfAwait {

-override protected def darFile: File = new File("ledger/sandbox/Test.dalf")
+override protected def darFile: File = new File("ledger/sandbox/Test.dar")
 @Benchmark
 def ingest10kCommands(state: PerfBenchState): Unit = {
 val commandCount = 10000L
@@ -21,5 +21,5 @@ class AcsBenchState extends PerfBenchState with DummyCommands with InfAwait {
 ()
 }

-override protected def darFile: File = new File("ledger/sandbox/Test.dalf")
+override protected def darFile: File = new File("ledger/sandbox/Test.dar")
 }
@@ -25,7 +25,7 @@ import scala.concurrent.duration._

 trait TestHelper {

-val darFile: File = new File("ledger/sandbox-perf/LargeTransaction.dalf")
+val darFile: File = new File("ledger/sandbox-perf/LargeTransaction.dar")

 val largeTxPackageId: PackageId = DarUtil.getPackageId(darFile)

@@ -3,16 +3,13 @@

 package com.digitalasset.platform.sandbox.perf.util

-import java.io.{File, FileInputStream}
+import java.io.File

+import com.digitalasset.daml.lf.UniversalArchiveReader
 import com.digitalasset.daml.lf.data.Ref.PackageId
-import com.digitalasset.daml.lf.lfpackage.Decode
-import com.digitalasset.daml_lf.DamlLf.Archive

 object DarUtil {
 def getPackageId(dalf: File): PackageId = {
-val archive = Archive.parseFrom(new FileInputStream(dalf))
-val (packageId, _) = Decode.decodeArchive(archive)
-packageId
+UniversalArchiveReader().readFile(dalf).get.main._1
 }
 }
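
A hypothetical caller of the simplified getPackageId, assuming the DarUtil object above; the object name is illustrative and the .dar path is one used elsewhere in this diff:

    import java.io.File
    import com.digitalasset.platform.sandbox.perf.util.DarUtil

    object PackageIdSketch {
      def main(args: Array[String]): Unit = {
        // Resolves the main package id of the archive via UniversalArchiveReader, as DarUtil now does.
        val pkgId = DarUtil.getPackageId(new File("ledger/sandbox-perf/LargeTransaction.dar"))
        println(s"LargeTransaction main package id: $pkgId")
      }
    }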
@@ -111,7 +111,6 @@ genrule(

 daml_compile(
 name = "Test",
-damlc = "//daml-foundations/daml-tools/da-hs-damlc-app:da-hs-damlc-app-deprecated",
 main_src = "src/test/resources/damls/Test.daml",
 target = "1.3",
 visibility = ["//visibility:public"],
@@ -141,8 +140,7 @@ da_scala_test_suite(
 ["src/test/suite/**/*.scala"],
 ),
 data = [
-"//daml-foundations/daml-ghc/package-database/deprecated:daml-prim-1.3.dalf",
-"//ledger/sandbox:Test.dalf",
+"//ledger/sandbox:Test.dar",
 ],
 resources = glob(["src/test/resources/**/*"]),
 deps = [
@@ -23,7 +23,7 @@ import com.digitalasset.platform.services.time.TimeModel
 import scala.concurrent.{ExecutionContext, Future}

 object TestDar {
-val dalfFile: File = new File("ledger/sandbox/Test.dalf")
+val dalfFile: File = new File("ledger/sandbox/Test.dar")
 // DamlLf1 test package
 lazy val parsedPackage = DamlPackageContainer(List(dalfFile))
 lazy val parsedArchive = parsedPackage.archives.head
@@ -28,10 +28,7 @@ import scala.util.Try
 trait SandboxFixture extends SuiteResource[Channel] {
 self: Suite =>

-protected def darFile = new File("ledger/sandbox/Test.dalf")
-
-protected def ghcPrimFile =
-new File("daml-foundations/daml-ghc/package-database/deprecated/daml-prim-1.3.dalf")
+protected def darFile = new File("ledger/sandbox/Test.dar")

 protected def channel: Channel = suiteResource.value

@@ -60,7 +57,7 @@ trait SandboxFixture extends SuiteResource[Channel] {
 ledgerIdMode = LedgerIdMode.HardCoded("sandbox server")
 )

-protected def packageFiles: List[File] = List(darFile, ghcPrimFile)
+protected def packageFiles: List[File] = List(darFile)

 protected def scenario: Option[String] = None

@@ -72,4 +69,4 @@ trait SandboxFixture extends SuiteResource[Channel] {

 def getSandboxPort: Int = sandboxResource.getPort

-}
+}
@@ -17,7 +17,7 @@ class SandboxContextSpec extends WordSpec with Matchers with TestHelpers {
 val time = Array("--static-time")
 val dar = Array(TestDar.dalfFile.toString)

-val dalfFileName = "ledger/sandbox/Test.dalf"
+val dalfFileName = "ledger/sandbox/Test.dar"
 val dalf = Array(dalfFileName)

 val Some(ctx) = SandboxContext(port ++ time ++ dar ++ dalf)
@@ -90,7 +90,7 @@ class EventConverterSpec
 .withCommands(Seq(create))

 object CommandsToTest extends TestCommands {
-override protected def darFile: File = new File("ledger/sandbox/Test.dalf")
+override protected def darFile: File = new File("ledger/sandbox/Test.dar")

 val damlPackageContainer = DamlPackageContainer(scala.collection.immutable.List(darFile), true)
 val onKbCmd = oneKbCommandRequest("ledgerId", "big").getCommands