Add golden test for example Daml ledger export (#9732)

* client_server_build - user-defined outputs

Let the user define a list of output files on client_server_build.

changelog_begin
changelog_end
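
For illustration, a minimal sketch of the resulting interface (target and
file names here are hypothetical):

```
client_server_build(
    name = "my-build",
    # An explicit, user-defined list of outputs replaces the previous
    # implicit single output "%{name}.out".
    outs = [
        "my-build/result.txt",
        "my-build/summary.json",
    ],
    client = ":my_client",
    server = ":my_server",
    # The paths of all files in `outs` are exposed to the client and the
    # server as a space-separated list in this environment variable.
    output_env = "MY_BUILD_OUT",
)
```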

* daml-script runner: expose main(config)

changelog_begin
changelog_end

* Add an example Daml ledger export to the docs

* Build Daml ledger export

changelog_begin
changelog_end

* sh_inline_test

Support toolchain arguments to sh_inline_test. Useful for make-variable
expansion, e.g. for the POSIX toolchain.
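
A minimal sketch, assuming a hypothetical test that wants `$(POSIX_SED)`
expanded inside its command:

```
sh_inline_test(
    name = "my-inline-test",
    cmd = """\
# $(POSIX_SED) is a make variable provided by the POSIX toolchain; it only
# expands because the toolchain is forwarded to the underlying script rule.
$(POSIX_SED) -n '1p' $$(canonicalize_rlocation $(rootpath :input.txt))
""",
    data = [":input.txt"],
    toolchains = ["@rules_sh//sh/posix:make_variables"],
)
```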

* Add a golden test for Daml ledger export

* Test args files as well

* Use sed from POSIX toolchain

* Add a normalization note to the top-level comment

* Ignore trailing CR on Windows

The JSON formatting of the args file uses Windows line endings on
Windows, so the args.json output technically differs from the expected
output. We're happy to ignore the CRLF difference here.
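
As a sketch of the workaround (with hypothetical expected/actual targets),
diff's `--strip-trailing-cr` makes the comparison tolerate CRLF:

```
sh_inline_test(
    name = "compare-ignoring-crlf",
    cmd = """\
# --strip-trailing-cr strips a trailing carriage return from each line
# before comparing, so CRLF output on Windows matches LF expectations.
$(POSIX_DIFF) --strip-trailing-cr \\
    $$(canonicalize_rlocation $(rootpath :expected.json)) \\
    $$(canonicalize_rlocation $(rootpath :actual.json))
""",
    data = [
        ":expected.json",
        ":actual.json",
    ],
    toolchains = ["@rules_sh//sh/posix:make_variables"],
)
```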

Co-authored-by: Andreas Herrmann <andreas.herrmann@tweag.io>
Andreas Herrmann 2021-05-19 14:02:14 +02:00 committed by GitHub
parent bb5dd4cbf3
commit 10177d239e
14 changed files with 426 additions and 115 deletions

View File

@@ -4,7 +4,7 @@
def _client_server_build_impl(ctx):
posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
ctx.actions.run_shell(
outputs = [ctx.outputs.out],
outputs = ctx.outputs.outs,
inputs = ctx.files.data,
tools = depset([
ctx.executable.runner,
@@ -21,7 +21,7 @@ def _client_server_build_impl(ctx):
""".format(
cat = posix.commands["cat"],
output_env = ctx.attr.output_env,
output_path = ctx.outputs.out.path,
output_path = " ".join([o.path for o in ctx.outputs.outs]),
runner = ctx.executable.runner.path,
client = ctx.executable.client.path,
server = ctx.executable.server.path,
@@ -34,7 +34,7 @@
)
return [
DefaultInfo(
files = depset([ctx.outputs.out]),
files = depset(ctx.outputs.outs),
),
]
@@ -60,27 +60,26 @@ client_server_build = rule(
),
"server_args": attr.string_list(),
"server_files": attr.label_list(allow_files = True),
"outs": attr.output_list(mandatory = True),
"output_env": attr.string(),
"data": attr.label_list(allow_files = True),
},
outputs = {
"out": "%{name}.out",
},
toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)
"""Creates a build target for a client-server run.
This rule is similar to the client_server_test rule, but
instead of producing a test target it produces a build target
that creates some result file from the run. Useful for producing
that creates some result files from the run. Useful for producing
test data from integration tests that can then be used for
e.g. testing backwards compatibility.
The rule takes mostly the same arguments as the client_server_test
rule, with the exception that {client,server}_files is a label_list.
Additionally it takes the required argument "output_env", which specifies
the environment variable in which to store the output filename. This variable can be
used either by the client or the server to write the output.
The rule takes mostly the same arguments as the client_server_test rule, with
the exception that {client,server}_files is a label_list. Additionally it takes
the required attributes "outs" and "output_env", which specify the list of
output files and an environment variable in which to store the paths to the
output files as a space-separated list. This variable can be used either by the
client or the server to write the output.
Note that additional data files are not provided as runfiles (as they are
with the client_server_test rule), but rather placed relative to the working
@@ -97,6 +96,7 @@ Example:
server = ":my_server",
server_args = ["--fast"],
server_files = [":file-target-for-server"],
outs = ["my_client_server_build.out"],
output_env = "MY_TEST_OUT",
)
```

View File

@@ -50,6 +50,7 @@ client_server_test(
client_server_build(
name = "build",
outs = ["build.out"],
client = ":client",
client_args = [
"--foobar",

View File

@@ -49,6 +49,7 @@ def sh_inline_test(
name,
cmd,
data = [],
toolchains = [],
**kwargs):
testonly = kwargs.pop("testonly", True)
_sh_inline_script(
@@ -57,6 +58,7 @@
output = name + ".sh",
data = data,
testonly = testonly,
toolchains = toolchains,
)
native.sh_test(
name = name,

View File

@@ -1,6 +1,14 @@
# Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
load(
"//bazel_tools/client_server:client_server_build.bzl",
"client_server_build",
)
load(
"//bazel_tools/sh:sh.bzl",
"sh_inline_test",
)
load(
"//bazel_tools:scala.bzl",
"da_scala_binary",
@@ -111,3 +119,69 @@ da_scala_test(
"@maven//:io_netty_netty_handler",
],
)
da_scala_binary(
name = "example-export-client",
srcs = ["src/example-export/scala/com/daml/script/export/ExampleClient.scala"],
main_class = "com.daml.script.export.ExampleClient",
scala_deps = [
"@maven//:com_github_scopt_scopt",
],
deps = [
":export",
"//daml-lf/data",
"//daml-script/runner:script-runner-lib",
"//language-support/scala/bindings",
"//language-support/scala/bindings-akka",
"//ledger/ledger-api-common",
"//libs-scala/auth-utils",
],
)
client_server_build(
name = "example-export",
outs = [
"example-export/Export.daml",
"example-export/args.json",
],
client = ":example-export-client",
client_files = ["//daml-script/test:script-test.dar"],
data = ["//daml-script/test:script-test.dar"],
output_env = "EXPORT_OUT",
server = "//ledger/sandbox:sandbox-binary",
server_files = ["//daml-script/test:script-test.dar"],
)
# Compare the generated Daml ledger export to the example export used in the
# documentation. This functions as both a golden test on ledger exports and to
# make sure that the documentation stays up-to-date.
#
# Normalizes the expected output by removing the copyright header and any
# documentation import markers and normalizes the actual output by adding a
# newline to the last line if missing.
sh_inline_test(
name = "example-export-compare",
cmd = """\
EXPECTED_EXPORT=$$(canonicalize_rlocation $(rootpath //docs:source/tools/export/output-root/Export.daml))
EXPECTED_ARGS=$$(canonicalize_rlocation $(rootpath //docs:source/tools/export/output-root/args.json))
ACTUAL_EXPORT=$$(canonicalize_rlocation $(rootpath :example-export/Export.daml))
ACTUAL_ARGS=$$(canonicalize_rlocation $(rootpath :example-export/args.json))
# Normalize the expected file by removing the copyright header and any documentation import markers.
# Normalize the actual output by adding a newline to the last line if missing.
$(POSIX_DIFF) -Naur --strip-trailing-cr <($(POSIX_SED) '1,3d;/^-- EXPORT/d' $$EXPECTED_EXPORT) <($(POSIX_SED) '$$a\\' $$ACTUAL_EXPORT) || {
echo "$$EXPECTED_EXPORT did not match $$ACTUAL_EXPORT"
exit 1
}
$(POSIX_DIFF) -Naur --strip-trailing-cr $$EXPECTED_ARGS <($(POSIX_SED) '$$a\\' $$ACTUAL_ARGS) || {
echo "$$EXPECTED_ARGS did not match $$ACTUAL_ARGS"
exit 1
}
""",
data = [
":example-export/Export.daml",
":example-export/args.json",
"//docs:source/tools/export/output-root/Export.daml",
"//docs:source/tools/export/output-root/args.json",
],
toolchains = ["@rules_sh//sh/posix:make_variables"],
)

View File

@@ -0,0 +1,117 @@
// Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.script.export
import java.io.File
import java.nio.file.{Path, Paths}
import java.time.Duration
import com.daml.ledger.api.tls.TlsConfiguration
import com.daml.lf.engine.script.{RunnerConfig, RunnerMain}
case class ExampleClientConfig(
darPath: File,
targetPort: Int,
outputPath: Path,
)
object ExampleClientConfig {
def parse(args: Array[String]): Option[ExampleClientConfig] =
parser.parse(
args,
ExampleClientConfig(
darPath = null,
targetPort = -1,
outputPath = null,
),
)
private def parseExportOut(
envVar: String
): Either[String, ExampleClientConfig => ExampleClientConfig] = {
if (envVar.isEmpty) Left("Environment variable EXPORT_OUT must not be empty")
else
envVar.split(" ") match {
case Array(export_daml, args_json) =>
val export_daml_path = Paths.get(export_daml)
val args_json_path = Paths.get(args_json)
if (export_daml_path.getParent == null) {
Left("First component in environment variable EXPORT_OUT has no parent")
} else if (export_daml_path.getParent != args_json_path.getParent) {
Left(
"First and second component in environment variable EXPORT_OUT have different parent"
)
} else {
Right(c => c.copy(outputPath = export_daml_path.getParent))
}
case _ => Left("Environment variable EXPORT_OUT must contain one path")
}
}
private val parser = new scopt.OptionParser[ExampleClientConfig]("script-export") {
help("help")
.text("Show this help message.")
opt[Int]("target-port")
.required()
.action((x, c) => c.copy(targetPort = x))
.text("Daml ledger port to connect to.")
opt[String]("output")
.hidden()
.withFallback(() => sys.env.getOrElse("EXPORT_OUT", ""))
.validate(x => parseExportOut(x).map(_ => ()))
.action { (x, c) =>
parseExportOut(x) match {
case Left(msg) =>
throw new RuntimeException(s"Failed to validate EXPORT_OUT environment variable: $msg")
case Right(f) => f(c)
}
}
arg[File]("dar")
.required()
.action((f, c) => c.copy(darPath = f))
.text("Path to the dar file containing the initialization script")
}
}
object ExampleClient {
def main(args: Array[String]): Unit = {
ExampleClientConfig.parse(args) match {
case Some(clientConfig) => main(clientConfig)
case None => sys.exit(1)
}
}
def main(clientConfig: ExampleClientConfig): Unit = {
RunnerMain.main(
RunnerConfig(
darPath = clientConfig.darPath,
scriptIdentifier = "ScriptExample:initializeFixed",
ledgerHost = Some("localhost"),
ledgerPort = Some(clientConfig.targetPort),
participantConfig = None,
timeMode = Some(RunnerConfig.DefaultTimeMode),
commandTtl = Duration.ofSeconds(30L),
inputFile = None,
outputFile = None,
accessTokenFile = None,
tlsConfig = TlsConfiguration(false, None, None, None),
jsonApi = false,
maxInboundMessageSize = RunnerConfig.DefaultMaxInboundMessageSize,
applicationId = None,
)
)
Main.main(
Config.Empty.copy(
ledgerHost = "localhost",
ledgerPort = clientConfig.targetPort,
parties = Seq("Alice", "Bob"),
exportType = Some(
Config.EmptyExportScript.copy(
sdkVersion = "0.0.0",
outputPath = clientConfig.outputPath,
)
),
)
)
}
}

View File

@@ -117,7 +117,7 @@ object Config {
)
}
private val EmptyExportScript = ExportScript(
val EmptyExportScript = ExportScript(
outputPath = null,
sdkVersion = "",
acsBatchSize = 10,
@@ -135,7 +135,7 @@
}
}
private val Empty = Config(
val Empty = Config(
ledgerHost = "",
ledgerPort = -1,
tlsConfig = TlsConfiguration(false, None, None, None),

View File

@@ -29,108 +29,109 @@ import com.daml.auth.TokenHolder
object RunnerMain {
def main(args: Array[String]): Unit = {
RunnerConfig.parse(args) match {
case None => sys.exit(1)
case Some(config) => {
val encodedDar: Dar[(PackageId, DamlLf.ArchivePayload)] =
DarReader().readArchiveFromFile(config.darPath).get
val dar: Dar[(PackageId, Package)] = encodedDar.map { case (pkgId, pkgArchive) =>
Decode.readArchivePayload(pkgId, pkgArchive)
}
val scriptId: Identifier =
Identifier(dar.main._1, QualifiedName.assertFromString(config.scriptIdentifier))
val timeMode: ScriptTimeMode = config.timeMode.getOrElse(RunnerConfig.DefaultTimeMode)
implicit val system: ActorSystem = ActorSystem("ScriptRunner")
implicit val sequencer: ExecutionSequencerFactory =
new AkkaExecutionSequencerPool("ScriptRunnerPool")(system)
implicit val ec: ExecutionContext = system.dispatcher
implicit val materializer: Materializer = Materializer(system)
val inputValue = config.inputFile.map(file => {
val source = Source.fromFile(file)
val fileContent =
try {
source.mkString
} finally {
source.close()
}
fileContent.parseJson
})
val participantParams = config.participantConfig match {
case Some(file) => {
// We allow specifying --access-token-file/--application-id together with
// --participant-config and use the values as the default for
// all participants that do not specify an explicit token.
val source = Source.fromFile(file)
val fileContent =
try {
source.mkString
} finally {
source.close
}
val jsVal = fileContent.parseJson
val token = config.accessTokenFile.map(new TokenHolder(_)).flatMap(_.token)
import ParticipantsJsonProtocol._
jsVal
.convertTo[Participants[ApiParameters]]
.map(params =>
params.copy(
access_token = params.access_token.orElse(token),
application_id = params.application_id.orElse(config.applicationId),
)
)
}
case None =>
val tokenHolder = config.accessTokenFile.map(new TokenHolder(_))
Participants(
default_participant = Some(
ApiParameters(
config.ledgerHost.get,
config.ledgerPort.get,
tokenHolder.flatMap(_.token),
config.applicationId,
)
),
participants = Map.empty,
party_participants = Map.empty,
)
}
val flow: Future[Unit] = for {
clients <-
if (config.jsonApi) {
val ifaceDar = dar.map(pkg => InterfaceReader.readInterface(() => \/-(pkg))._2)
val envIface = EnvironmentInterface.fromReaderInterfaces(ifaceDar)
Runner.jsonClients(participantParams, envIface)
} else {
Runner.connect(participantParams, config.tlsConfig, config.maxInboundMessageSize)
}
result <- Runner.run(dar, scriptId, inputValue, clients, timeMode)
_ <- Future {
config.outputFile.foreach { outputFile =>
val jsVal = LfValueCodec.apiValueToJsValue(result.toValue)
val outDir = outputFile.getParentFile()
if (outDir != null) {
val _ = Files.createDirectories(outDir.toPath())
}
Files.write(outputFile.toPath, Seq(jsVal.prettyPrint).asJava)
}
}
} yield ()
flow.onComplete(_ =>
if (config.jsonApi) {
Http().shutdownAllConnectionPools().flatMap { case () => system.terminate() }
} else {
system.terminate()
}
)
Await.result(flow, Duration.Inf)
}
case Some(config) => main(config)
}
}
def main(config: RunnerConfig): Unit = {
val encodedDar: Dar[(PackageId, DamlLf.ArchivePayload)] =
DarReader().readArchiveFromFile(config.darPath).get
val dar: Dar[(PackageId, Package)] = encodedDar.map { case (pkgId, pkgArchive) =>
Decode.readArchivePayload(pkgId, pkgArchive)
}
val scriptId: Identifier =
Identifier(dar.main._1, QualifiedName.assertFromString(config.scriptIdentifier))
val timeMode: ScriptTimeMode = config.timeMode.getOrElse(RunnerConfig.DefaultTimeMode)
implicit val system: ActorSystem = ActorSystem("ScriptRunner")
implicit val sequencer: ExecutionSequencerFactory =
new AkkaExecutionSequencerPool("ScriptRunnerPool")(system)
implicit val ec: ExecutionContext = system.dispatcher
implicit val materializer: Materializer = Materializer(system)
val inputValue = config.inputFile.map(file => {
val source = Source.fromFile(file)
val fileContent =
try {
source.mkString
} finally {
source.close()
}
fileContent.parseJson
})
val participantParams = config.participantConfig match {
case Some(file) => {
// We allow specifying --access-token-file/--application-id together with
// --participant-config and use the values as the default for
// all participants that do not specify an explicit token.
val source = Source.fromFile(file)
val fileContent =
try {
source.mkString
} finally {
source.close
}
val jsVal = fileContent.parseJson
val token = config.accessTokenFile.map(new TokenHolder(_)).flatMap(_.token)
import ParticipantsJsonProtocol._
jsVal
.convertTo[Participants[ApiParameters]]
.map(params =>
params.copy(
access_token = params.access_token.orElse(token),
application_id = params.application_id.orElse(config.applicationId),
)
)
}
case None =>
val tokenHolder = config.accessTokenFile.map(new TokenHolder(_))
Participants(
default_participant = Some(
ApiParameters(
config.ledgerHost.get,
config.ledgerPort.get,
tokenHolder.flatMap(_.token),
config.applicationId,
)
),
participants = Map.empty,
party_participants = Map.empty,
)
}
val flow: Future[Unit] = for {
clients <-
if (config.jsonApi) {
val ifaceDar = dar.map(pkg => InterfaceReader.readInterface(() => \/-(pkg))._2)
val envIface = EnvironmentInterface.fromReaderInterfaces(ifaceDar)
Runner.jsonClients(participantParams, envIface)
} else {
Runner.connect(participantParams, config.tlsConfig, config.maxInboundMessageSize)
}
result <- Runner.run(dar, scriptId, inputValue, clients, timeMode)
_ <- Future {
config.outputFile.foreach { outputFile =>
val jsVal = LfValueCodec.apiValueToJsValue(result.toValue)
val outDir = outputFile.getParentFile()
if (outDir != null) {
val _ = Files.createDirectories(outDir.toPath())
}
Files.write(outputFile.toPath, Seq(jsVal.prettyPrint).asJava)
}
}
} yield ()
flow.onComplete(_ =>
if (config.jsonApi) {
Http().shutdownAllConnectionPools().flatMap { case () => system.terminate() }
} else {
system.terminate()
}
)
Await.result(flow, Duration.Inf)
}
}

View File

@@ -14,6 +14,8 @@ exports_files(
"configs/static/typescript.py",
"scripts/check-closing-quotes.sh",
"scripts/check-closing-quotes.sh.allow",
"source/tools/export/output-root/Export.daml",
"source/tools/export/output-root/args.json",
],
)

View File

@@ -112,7 +112,7 @@ Daml Documentation
:caption: Early Access Features
tools/extractor
tools/export
tools/export/index
daml-integration-kit/index
tools/visual
concepts/interoperability

View File

@@ -0,0 +1,89 @@
-- Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
-- SPDX-License-Identifier: Apache-2.0
{-# LANGUAGE ApplicativeDo #-}
module Export where
import Daml.Script
import qualified ScriptExample
import qualified DA.Traversable
import qualified DA.Stack
import qualified DA.TextMap
-- | Mapping from party names in the original ledger state
-- to parties to be used in 'export'.
type Parties = DA.TextMap.TextMap Party
-- | Look-up a party based on the party name in the original ledger state.
lookupParty : DA.Stack.HasCallStack => Text -> Parties -> Party
lookupParty old parties =
case DA.TextMap.lookup old parties of
None -> error ("Missing party " <> old)
Some new -> new
-- | Allocates fresh parties from the party management service.
allocateParties : Script Parties
allocateParties = DA.Traversable.mapA allocateParty (DA.TextMap.fromList
[ ("Bank", "Bank")
, ("Alice", "Alice")
, ("Bob", "Bob") ])
-- | Mapping from missing contract ids to replacement contract ids.
--
-- You can provide replacement contract ids in an input file to
-- the @--input-file@ argument of @daml script@, or you can provide
-- replacements from within Daml script.
--
-- >>> (replacement, _):_ <- query @T alice_0
-- >>> let args = Args with
-- >>> parties = Parties with alice_0
-- >>> contracts = DA.TextMap.fromList [("00737...", replacement)]
-- >>> export args
type Contracts = DA.TextMap.TextMap (ContractId ())
-- | Look-up a replacement for a missing contract id. Fails if none is found.
lookupContract : DA.Stack.HasCallStack => Text -> Contracts -> ContractId a
lookupContract old contracts =
case DA.TextMap.lookup old contracts of
None -> error ("Missing contract id " <> old)
Some new -> coerceContractId new
-- | Arguments to 'export'. See 'Parties' and 'Contracts' for details.
data Args = Args with
parties : Parties
contracts : Contracts
-- | Test 'export' with freshly allocated parties and
-- no replacements for missing contract ids.
testExport : Script ()
testExport = do
parties <- allocateParties
let contracts = DA.TextMap.empty
export Args with ..
-- | The Daml ledger export.
export : Args -> Script ()
export Args{parties, contracts} = do
-- EXPORT_PARTIES_BEGIN
let bank_0 = lookupParty "Bank" parties
let alice_0 = lookupParty "Alice" parties
let bob_0 = lookupParty "Bob" parties
-- EXPORT_PARTIES_END
-- EXPORT_PROPOSALS_BEGIN
(coinProposal_1_0, coinProposal_1_1) <- submit bank_0 do
coinProposal_1_0 <- createCmd ScriptExample.CoinProposal with
coin = ScriptExample.Coin with
issuer = bank_0
owner = alice_0
coinProposal_1_1 <- createCmd ScriptExample.CoinProposal with
coin = ScriptExample.Coin with
issuer = bank_0
owner = bob_0
pure (coinProposal_1_0, coinProposal_1_1)
-- EXPORT_PROPOSALS_END
-- EXPORT_ACCEPT_BEGIN
_ <- submit alice_0 do
exerciseCmd coinProposal_1_0 ScriptExample.Accept
_ <- submit bob_0 do
exerciseCmd coinProposal_1_1 ScriptExample.Accept
-- EXPORT_ACCEPT_END
pure ()

View File

@@ -0,0 +1,10 @@
{
"contracts": {
},
"parties": {
"Bank": "Bank",
"Alice": "Alice",
"Bob": "Bob"
}
}

View File

@@ -0,0 +1,14 @@
# Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
sdk-version: 0.0.0
name: export
version: 1.0.0
source: .
init-script: Export:export
script-options: [--input-file, args.json]
parties: [Bank,Alice,Bob]
dependencies: [daml-stdlib, daml-prim, daml-script]
data-dependencies: [EXPORT_OUT/deps/daml-stdlib-0.0.0-68a8ef91f10c283e1a5fc914bbf0addd41a089ad3625641966df2415e6010e2e.dalf,EXPORT_OUT/deps/script-test-0.0.1-87967f7d2b53db9652bd343a24db7cbe371c4b105005d57eed853bd309bac477.dalf,EXPORT_OUT/deps/c1f1f00558799eec139fb4f4c76f95fb52fa1837a5dd29600baa1c8ed1bdccfd.dalf,EXPORT_OUT/deps/733e38d36a2759688a4b2c4cec69d48e7b55ecc8dedc8067b815926c917a182a.dalf,EXPORT_OUT/deps/bfcd37bd6b84768e86e432f5f6c33e25d9e7724a9d42e33875ff74f6348e733f.dalf,EXPORT_OUT/deps/518032f41fd0175461b35ae0c9691e08b4aea55e62915f8360af2cc7a1f2ba6c.dalf,EXPORT_OUT/deps/cc348d369011362a5190fe96dd1f0dfbc697fdfd10e382b9e9666f0da05961b7.dalf,EXPORT_OUT/deps/6839a6d3d430c569b2425e9391717b44ca324b88ba621d597778811b2d05031d.dalf,EXPORT_OUT/deps/99a2705ed38c1c26cbb8fe7acf36bbf626668e167a33335de932599219e0a235.dalf,EXPORT_OUT/deps/76bf0fd12bd945762a01f8fc5bbcdfa4d0ff20f8762af490f8f41d6237c6524f.dalf,EXPORT_OUT/deps/e22bce619ae24ca3b8e6519281cb5a33b64b3190cc763248b4c3f9ad5087a92c.dalf,EXPORT_OUT/deps/d58cf9939847921b2aab78eaa7b427dc4c649d25e6bee3c749ace4c3f52f5c97.dalf,EXPORT_OUT/deps/6c2c0667393c5f92f1885163068cd31800d2264eb088eb6fc740e11241b2bf06.dalf,EXPORT_OUT/deps/d14e08374fc7197d6a0de468c968ae8ba3aadbf9315476fd39071831f5923662.dalf,EXPORT_OUT/deps/057eed1fd48c238491b8ea06b9b5bf85a5d4c9275dd3f6183e0e6b01730cc2ba.dalf,EXPORT_OUT/deps/e491352788e56ca4603acc411ffe1a49fefd76ed8b163af86cf5ee5f4c38645b.dalf,EXPORT_OUT/deps/daml-prim-0.0.0-b76d13799ea5488f281440b63072ad3e3d48d72936144e7d19209c9cf115aa9d.dalf,EXPORT_OUT/deps/40f452260bef3f29dede136108fc08a88d5a5250310281067087da6f0baddff7.dalf,EXPORT_OUT/deps/daml-stdlib-DA-Set-Types-1.0.0-97b883cd8a2b7f49f90d5d39c981cf6e110cf1f1c64427a28a6d58ec88c43657.dalf,EXPORT_OUT/deps/daml-script-0.0.0-024f7a831e04595b36c668121dce9209d63e5a6a46f01a4533fca5e4c521e7e4.dalf,EXPORT_OUT/deps/8a7806365bbd98d88b4c13832ebfa305f6abaeaf32cfa2b7dd25c4fa489b79fb.dalf]
build-options: [--target=1.12,--package=script-test-0.0.1]

View File

@@ -227,6 +227,7 @@ REFERENCE_LEDGER_EXPORT_PORT = 65102
client_server_build(
name = REFERENCE_LEDGER_EXPORT_NAME,
testonly = True, # only test targets can depend on this.
outs = ["%s.out" % REFERENCE_LEDGER_EXPORT_NAME],
client = "//ledger/ledger-api-test-tool",
client_args = [
"--concurrent-test-runs=4",