Drop Ledger Api Bench Tool (#16857)

This commit is contained in:
Remy 2023-05-16 10:09:13 +02:00 committed by GitHub
parent 36293a4e1a
commit c5ca9e4cf0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
108 changed files with 0 additions and 10723 deletions

View File

@ -1,171 +0,0 @@
# Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
load(
"//bazel_tools:scala.bzl",
"da_scala_binary",
"da_scala_library",
"da_scala_test",
"da_scala_test_suite",
)
# Executable fat-jar wrapper for the bench tool; all logic lives in the
# :ledger-api-bench-tool-lib target below, so this target has no sources.
da_scala_binary(
name = "ledger-api-bench-tool",
srcs = [],
main_class = "com.daml.ledger.api.benchtool.LedgerApiBenchTool",
resources = [
# Logging configuration bundled into the jar.
"src/main/resources/logback.xml",
],
tags = [
"fat_jar",
"maven_coordinates=com.daml:ledger-api-bench-tool:__VERSION__",
"no_scala_version_suffix",
],
visibility = ["//visibility:public"],
runtime_deps = [
# Logback backend is only needed at run time, not for compilation.
"@maven//:ch_qos_logback_logback_classic",
],
deps = [
":ledger-api-bench-tool-lib",
],
)
# Main implementation library: configuration parsing, command submission and
# stream-benchmarking logic for the ledger-api-bench-tool.
da_scala_library(
name = "ledger-api-bench-tool-lib",
srcs = glob(["src/main/scala/com/daml/ledger/api/benchtool/**/*.scala"]),
resources = [],
scala_deps = [
"@maven//:com_github_scopt_scopt",
"@maven//:com_lihaoyi_fansi",
"@maven//:com_lihaoyi_pprint",
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_actor_typed",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:io_circe_circe_core",
"@maven//:io_circe_circe_yaml",
"@maven//:io_circe_circe_yaml_common",
"@maven//:org_typelevel_cats_core",
],
visibility = ["//visibility:public"],
runtime_deps = [
# Needed at run time for the optional 'VACUUM ANALYZE' step against a
# Postgres-backed IndexDB.
"@maven//:org_postgresql_postgresql",
],
deps = [
"//daml-lf/archive:daml_lf_archive_reader",
"//daml-lf/language",
"//language-support/scala/bindings",
"//ledger/error",
"//ledger/ledger-api-auth",
"//ledger/ledger-api-auth-client",
"//ledger/ledger-api-common",
"//ledger/ledger-api-errors",
"//ledger/participant-local-store",
"//libs-scala/jwt",
"//libs-scala/ledger-resources",
"//libs-scala/resources",
"//libs-scala/resources-akka",
"//libs-scala/resources-grpc",
"//libs-scala/timer-utils",
"//test-common:benchtool-tests-%s.scala" % "1.15", # TODO: make the LF version configurable
"//test-common:dar-files-%s-lib" % "1.15", # TODO: make the LF version configurable
"//observability/metrics",
"@maven//:io_grpc_grpc_api",
"@maven//:io_grpc_grpc_core",
"@maven//:io_grpc_grpc_netty",
"@maven//:io_netty_netty_handler",
"@maven//:io_opentelemetry_opentelemetry_api",
"@maven//:io_opentelemetry_opentelemetry_exporter_prometheus",
"@maven//:io_opentelemetry_opentelemetry_sdk_metrics",
"@maven//:org_slf4j_slf4j_api",
],
)
# Shared test fixtures (src/test/lib) used by the test suite below; public so
# other packages can reuse the bench-tool test helpers.
da_scala_library(
name = "ledger-api-bench-tool-test-lib",
srcs = glob(["src/test/lib/**/*.scala"]),
scala_deps = [
"@maven//:org_scalatest_scalatest_core",
"@maven//:org_scalactic_scalactic",
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
],
visibility = ["//visibility:public"],
deps = [
":ledger-api-bench-tool-lib",
"//bazel_tools/runfiles:scala_runfiles",
"//daml-lf/engine",
"//daml-lf/language",
"//daml-lf/transaction",
"//ledger-api/testing-utils",
"//libs-scala/ports",
"//test-common:dar-files-%s-lib" % "1.15",
"//language-support/scala/bindings",
"//ledger-api/rs-grpc-bridge",
"//ledger/ledger-runner-common",
"//ledger/participant-integration-api",
"//ledger/sandbox-on-x",
"//ledger/sandbox-on-x:sandbox-on-x-test-lib",
"//observability/metrics",
"@maven//:org_slf4j_slf4j_api",
],
)
# Unit/integration test suite for the bench tool (src/test/suite); runs each
# test class as a separate target.
da_scala_test_suite(
name = "ledger-api-bench-tool-tests",
size = "medium",
srcs = glob(
["src/test/suite/**/*.scala"],
),
data = [
# Test DARs and TLS certificates needed at run time by the tests.
"//daml-lf/encoder:testing-dars",
"//test-common:benchtool-tests-%s.dar" % "1.15",
"//test-common/test-certificates",
],
scala_deps = [
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:com_typesafe_akka_akka_actor_typed",
"@maven//:com_typesafe_akka_akka_actor_testkit_typed",
"@maven//:org_scalacheck_scalacheck",
"@maven//:org_scalatest_scalatest_core",
"@maven//:org_scalatest_scalatest_matchers_core",
"@maven//:org_scalatest_scalatest_shouldmatchers",
"@maven//:org_scalatest_scalatest_wordspec",
"@maven//:org_scalatestplus_scalacheck_1_15",
],
runtime_deps = [
"@maven//:ch_qos_logback_logback_classic",
],
deps = [
":ledger-api-bench-tool-lib",
":ledger-api-bench-tool-test-lib",
"//libs-scala/postgresql-testing",
"//libs-scala/timer-utils",
"//test-common",
"//test-common:benchtool-tests-%s.scala" % "1.15",
"//test-common:dar-files-%s-lib" % "1.15",
"//language-support/scala/bindings",
"//ledger-api/rs-grpc-bridge",
"//ledger-api/testing-utils",
"//ledger/ledger-api-auth",
"//ledger/ledger-api-client",
"//ledger/ledger-api-common",
"//ledger/ledger-api-domain",
"//ledger/ledger-runner-common",
"//ledger/metrics",
"//ledger/sandbox-on-x",
"//ledger/sandbox-on-x:sandbox-on-x-test-lib",
"//libs-scala/adjustable-clock",
"//libs-scala/ledger-resources",
"//libs-scala/ports",
"//libs-scala/resources",
"//libs-scala/scala-utils",
"//observability/metrics",
"//test-common:dar-files-default-lib",
"@maven//:com_typesafe_config",
"@maven//:io_dropwizard_metrics_metrics_core",
"@maven//:io_grpc_grpc_api",
"@maven//:io_grpc_grpc_core",
"@maven//:org_scalatest_scalatest_compatible",
],
)

View File

@ -1,261 +0,0 @@
# ledger-api-bench-tool
The `ledger-api-bench-tool` is a tool for measuring performance of a ledger.
It allows running multiple concurrent streams that read transactions from a ledger and provides performance statistics
for such streams.
Please note that the `ledger-api-bench-tool` does not provide a load source for the ledger.
## Running
Run using `bazel run`:
```
bazel run -- //ledger/ledger-api-bench-tool --help
```
or using a fat jar:
```
bazel build //ledger/ledger-api-bench-tool:ledger-api-bench-tool_deploy.jar
java -jar bazel-bin/ledger/ledger-api-bench-tool/ledger-api-bench-tool_deploy.jar --help
```
## Configuration
Benchtool's .yaml configuration file has two main sections: `submission` and `streams`.
### Submission config
#### Observer, divulgee, extra-submitter parties
You can specify the number of observer, divulgee and extra-submitter parties to create as follows:
```yaml
submission:
...
num_observers: <NUMBER-OF-OBSERVERS>
num_divulgees: <NUMBER-OF-DIVULGEES>
num_extra_submitters: <NUMBER-OF-EXTRA-SUBMITTERS>
```
This tells the benchtool to create the specified number of each kind of party.
The names of observer parties will follow the pattern of `Obs-<INDEX>` where `0 <= INDEX < NUMBER-OF-OBSERVERS`
and each party will have its own unique probability of `1 / 10^INDEX` for being selected as an observer of a contract.
For example, when creating four observer parties they will be named `Obs-0`, `Obs-1`, `Obs-2`, `Obs-3`
and their respective probabilities will be 100%, 10%, 1% and 0.1%.
Divulgee and extra-submitter parties behave analogously to the observer parties: their names will be respectively `Div-<INDEX>`
and `Sub-<INDEX>` and their probabilities will be computed in the same way.
#### Contracts submission
You can control the details of contract instances to submit by specifying:
- the contract templates to use, e.g. 'Foo1' and 'Foo2',
- the number of contracts to create for a given template by specifying its weight, e.g. for each 10 contracts 'Foo1'
create one contract 'Foo2',
- the size in bytes of the special payload argument to the contracts (which is realized as a random string of the given size).
There are three templates you can choose from: 'Foo1', 'Foo2' and 'Foo3'
and they all have identical implementations (except for their identifiers).
```yaml
submission:
...
num_instances: 100
instance_distribution:
- template: Foo1
weight: 10
payload_size_bytes: 1000
- template: Foo2
weight: 1
payload_size_bytes: 1000
```
#### Consuming and nonconsuming exercises
You can specify consuming and nonconsuming exercises to submit as below.
```yaml
submission:
...
nonconsuming_exercises:
probability: 2.3
payload_size_bytes: 1000
consuming_exercises:
probability: 0.4
payload_size_bytes: 200
```
For nonconsuming exercises `probability` can be any positive number.
For example probability of `2.3` means that, for each create contract command,
there will be at least two nonconsuming exercises submitted and 30% chance of a third one.
For consuming exercises `probability` must be in the range `[0.0, 1.0]`.
#### Observer party sets
You can specify a large number of observer parties using observer party sets.
```yaml
submission:
...
observers_party_sets:
- party_name_prefix: Bar
count: 101
visibility: 0.01
- party_name_prefix: Baz
count: 12
visibility: 0.25
```
The above configuration snippet declares two party sets identified by their party name prefixes 'Bar' and 'Baz'.
The 'Bar' party set will result in creating 101 parties: {Bar-001, Bar-002, .., Bar-100}.
The 'Baz' party set will result in creating 12 parties: {Baz-01, Baz-02, .., Baz-11}.
Notice how the index of each party is left padded with zeroes to make the lexicographic party name order and
the numeric index order coincide. This is very helpful when you want to specify a subset of the parties from a party set
in a filter specification of a streaming config.
Each party from 'Bar' has 1% probability to become an observer on each new contract and each party from 'Baz'
has 25% probability.
### Streams config
#### Bounding stream runtime or size
You can specify when to finish streaming the elements either on timeout or element count criterion as follows:
```yaml
streams:
timeout: 10s
max_item_count: 100
```
#### Party prefix filters
You can match multiple parties for a stream with party prefix filter.
```yaml
streams:
...
party_prefix_filters:
- party_name_prefix: Bar-02
- party_name_prefix: Baz
```
Assuming the party sets from [Observer party sets](#observer-party-sets) sections, the above snippet is equivalent
to this one:
```yaml
streams:
...
filters:
- party: Bar-020
- party: Bar-021
- party: Bar-022
- party: Bar-023
- party: Bar-024
- party: Bar-025
- party: Bar-026
- party: Bar-027
- party: Bar-028
- party: Bar-029
- party: Baz-01
- party: Baz-02
- party: Baz-03
- party: Baz-04
- party: Baz-05
- party: Baz-06
- party: Baz-07
- party: Baz-08
- party: Baz-09
- party: Baz-10
- party: Baz-11
```
Here we used the prefixes of parties from party sets, but you are free to use arbitrary party
prefixes and they will be matched against all the existing parties.
#### Objectives
You can specify the objective that the stream should finish within the given time, for example in 1000 milliseconds:
```yaml
streams:
...
objectives:
max_stream_duration: 1000ms
```
You can specify the objective of the minimum and maximum rate of elements in the stream, for example at least 4000
elements per second and at most 8000 elements per second.
```yaml
streams:
...
objectives:
min_item_rate: 4000
max_item_rate: 8000
```
#### Ledger offset value
You can use any valid ledger offset.
Additionally, you can use two special values `"ledger-begin"` and `"ledger-end"`
## Metrics
### `CountRateMetric`
Number of elements processed per second.
Unit: `[elem/s]`
```periodic value = (number of elements in the last period) / (period duration)```
```final value = (total number of elements processed) / (total duration)```
### `TotalCountMetric`
Total number of processed elements.
Unit: `[-]`
```
periodic value = (number of elements processed so far)
final value = (total number of elements processed)
```
### `SizeMetric`
Amount of data processed per second.
Unit: `[MB/s]`
```
periodic value = (number of megabytes processed in the last period) / (period duration)
final value = (total number of megabytes processed) / (total duration)
```
### `DelayMetric`
Record time delay of a stream element is defined as follows:
```
record time delay = (current time) - (record time of a processed element)
```
The delay metric measures mean delay of elements processed in a period of time.
Unit: `[s]`
```
periodic value = (mean record time delay of elements processed in a period of time)
final value = N/A
```
Note that in case of running the `ledger-api-bench-tool` against a ledger with live data, the delay metric
is expected to converge to `0s` which is equivalent to being up-to-date with the most recent data.
### `ConsumptionSpeedMetric`
Describes the ratio of the time span covered by record times of processed elements to the period duration.
Unit: `[-]`
Additional definitions:
```
previous latest record time = (record time of the latest element from periods before the current period)
latest record time = (record time of the latest element in the current period OR undefined in case of a period without a single element)
```
```
periodic value =
if (latest record time != undefined) ((latest record time) - (previous latest record time)) / (period duration)
else 0.0
final value = N/A
```

View File

@ -1,19 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration for the ledger-api-bench-tool: all logging goes to stdout. -->
<configuration>
<appender name="stdout-appender" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<!-- ISO-8601 UTC timestamp, level, abbreviated logger name, thread, message -->
<pattern>%date{"yyyy-MM-dd'T'HH:mm:ss.SSSXXX", UTC} %-5level %logger{5}@[%-4.30thread] - %msg%n</pattern>
</encoder>
</appender>
<!-- Default level is DEBUG; override with the LOGLEVEL property/env variable. -->
<root level="${LOGLEVEL:-DEBUG}">
<appender-ref ref="stdout-appender"/>
</root>
<!-- Netty and gRPC are noisy at DEBUG; cap them at WARN. -->
<logger name="io.netty" level="WARN">
<appender-ref ref="stdout-appender"/>
</logger>
<logger name="io.grpc.netty" level="WARN">
<appender-ref ref="stdout-appender"/>
</logger>
</configuration>

View File

@ -1,43 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import com.daml.jwt.JwtSigner
import com.daml.jwt.domain.DecodedJwt
import com.daml.ledger.api.auth.client.LedgerCallCredentials
import com.daml.ledger.api.auth.{AuthServiceJWTCodec, StandardJWTPayload, StandardJWTTokenFormat}
import io.grpc.stub.AbstractStub
object AuthorizationHelper {

  /** Attaches call credentials to the given gRPC stub when a user token is provided;
    * returns the stub unchanged when no token is configured.
    */
  def maybeAuthedService[T <: AbstractStub[T]](userTokenO: Option[String])(service: T): T =
    userTokenO match {
      case Some(token) => LedgerCallCredentials.authenticatingStub(service, token)
      case None => service
    }
}
/** Produces HMAC256-signed user JWTs for authenticating against the Ledger API.
  *
  * @param authorizationTokenSecret shared secret used as the HMAC256 signing key
  */
class AuthorizationHelper(val authorizationTokenSecret: String) {
/** @return user token for the given userId, signed with HMAC256
  */
def tokenFor(userId: String): String = {
// Standard user-token payload: no issuer, no participant binding, no expiry
// and no audience restriction; "scope" token format.
val payload = StandardJWTPayload(
issuer = None,
participantId = None,
userId = userId,
exp = None,
format = StandardJWTTokenFormat.Scope,
audiences = List.empty,
)
JwtSigner.HMAC256
.sign(
jwt = DecodedJwt(
header = """{"alg": "HS256", "typ": "JWT"}""",
payload = AuthServiceJWTCodec.compactPrint(payload),
),
secret = authorizationTokenSecret,
)
// Signing failure is a fatal misconfiguration; abort instead of returning a bad token.
.getOrElse(sys.error("Failed to generate token"))
.value
}
}

View File

@ -1,147 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import akka.actor.typed.{ActorSystem, SpawnProtocol}
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig
import com.daml.ledger.api.benchtool.metrics.{BenchmarkResult, MetricsSet, StreamMetrics}
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.active_contracts_service.GetActiveContractsResponse
import com.daml.ledger.api.v1.command_completion_service.CompletionStreamResponse
import com.daml.ledger.api.v1.transaction_service.{
GetTransactionTreesResponse,
GetTransactionsResponse,
}
import com.daml.metrics.api.MetricHandle.MetricsFactory
import com.daml.timer.Delayed
import org.slf4j.LoggerFactory
import scala.annotation.nowarn
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.concurrent.{ExecutionContext, Future}
/** Read-path benchmark runner: opens one gRPC stream per configured [[StreamConfig]],
  * wraps each in a metrics-collecting observer, and reports whether all objectives were met.
  */
object Benchmark {
private val logger = LoggerFactory.getLogger(getClass)
/** Subscribes to all configured streams concurrently and waits for every one to finish.
  *
  * @param streamConfigs   one entry per stream to open (flat transactions, transaction trees,
  *                        active contracts or command completions)
  * @param reportingPeriod interval at which periodic metrics are logged
  * @param apiServices     Ledger API service stubs used for the subscriptions
  * @param metricsFactory  factory for the externally exposed metrics
  * @param system          actor system used by the metrics machinery and for scheduling
  * @return Right(()) when no stream violated its objectives, Left(message) otherwise
  */
def run(
streamConfigs: List[StreamConfig],
reportingPeriod: FiniteDuration,
apiServices: LedgerApiServices,
@nowarn metricsFactory: MetricsFactory,
system: ActorSystem[SpawnProtocol.Command],
)(implicit ec: ExecutionContext): Future[Either[String, Unit]] =
// Every branch follows the same pattern: optional subscription delay, build a
// metrics observer for the stream's response type, schedule an optional
// timeout-based cancellation, then perform the actual subscription.
Future
.traverse(streamConfigs) {
case streamConfig: StreamConfig.TransactionsStreamConfig =>
for {
_ <- delaySubscriptionIfConfigured(streamConfig)(system)
observer <- StreamMetrics
.observer[GetTransactionsResponse](
streamName = streamConfig.name,
logInterval = reportingPeriod,
metrics = MetricsSet.transactionMetrics(streamConfig.objectives),
logger = logger,
exposedMetrics = Some(
MetricsSet
.transactionExposedMetrics(streamConfig.name, metricsFactory)
),
itemCountingFunction = MetricsSet.countFlatTransactionsEvents,
maxItemCount = streamConfig.maxItemCount,
)(system, ec)
// Fire-and-forget: cancel the stream once the configured timeout elapses.
_ = streamConfig.timeoutO
.foreach(timeout => scheduleCancelStreamTask(timeout, observer))
result <- apiServices.transactionService.transactions(streamConfig, observer)
} yield result
case streamConfig: StreamConfig.TransactionTreesStreamConfig =>
for {
_ <- delaySubscriptionIfConfigured(streamConfig)(system)
observer <- StreamMetrics
.observer[GetTransactionTreesResponse](
streamName = streamConfig.name,
logInterval = reportingPeriod,
metrics = MetricsSet.transactionTreesMetrics(streamConfig.objectives),
logger = logger,
exposedMetrics = Some(
MetricsSet.transactionTreesExposedMetrics(
streamConfig.name,
metricsFactory,
)
),
itemCountingFunction = MetricsSet.countTreeTransactionsEvents,
maxItemCount = streamConfig.maxItemCount,
)(system, ec)
_ = streamConfig.timeoutO
.foreach(timeout => scheduleCancelStreamTask(timeout, observer))
result <- apiServices.transactionService.transactionTrees(streamConfig, observer)
} yield result
case streamConfig: StreamConfig.ActiveContractsStreamConfig =>
for {
_ <- delaySubscriptionIfConfigured(streamConfig)(system)
observer <- StreamMetrics
.observer[GetActiveContractsResponse](
streamName = streamConfig.name,
logInterval = reportingPeriod,
metrics = MetricsSet.activeContractsMetrics(streamConfig.objectives),
logger = logger,
exposedMetrics = Some(
MetricsSet.activeContractsExposedMetrics(
streamConfig.name,
metricsFactory,
)
),
itemCountingFunction = response => MetricsSet.countActiveContracts(response).toLong,
maxItemCount = streamConfig.maxItemCount,
)(system, ec)
_ = streamConfig.timeoutO
.foreach(timeout => scheduleCancelStreamTask(timeout, observer))
result <- apiServices.activeContractsService.getActiveContracts(streamConfig, observer)
} yield result
case streamConfig: StreamConfig.CompletionsStreamConfig =>
for {
_ <- delaySubscriptionIfConfigured(streamConfig)(system)
observer <- StreamMetrics
.observer[CompletionStreamResponse](
streamName = streamConfig.name,
logInterval = reportingPeriod,
metrics = MetricsSet.completionsMetrics(streamConfig.objectives),
logger = logger,
exposedMetrics = Some(
MetricsSet
.completionsExposedMetrics(streamConfig.name, metricsFactory)
),
itemCountingFunction = response => MetricsSet.countCompletions(response).toLong,
maxItemCount = streamConfig.maxItemCount,
)(system, ec)
_ = streamConfig.timeoutO
.foreach(timeout => scheduleCancelStreamTask(timeout, observer))
result <- apiServices.commandCompletionService.completions(streamConfig, observer)
} yield result
}
// A single violated objective fails the whole benchmark run.
.map { results =>
if (results.contains(BenchmarkResult.ObjectivesViolated))
Left("Metrics objectives not met.")
else Right(())
}
/** Schedules a one-shot task that cancels the given stream observer after
  * `timeoutDuration`; the returned future is deliberately discarded.
  */
def scheduleCancelStreamTask(timeoutDuration: Duration, observer: ObserverWithResult[_, _])(
implicit ec: ExecutionContext
): Unit = {
val _ = Delayed.by(t = timeoutDuration)(
observer.cancel()
)
}
/** Completes after the stream's configured subscription delay, or immediately
  * when no delay is configured.
  */
private def delaySubscriptionIfConfigured(
streamConfig: StreamConfig
)(implicit system: ActorSystem[SpawnProtocol.Command]): Future[Unit] =
streamConfig.subscriptionDelay match {
case Some(delay) =>
logger.info(
s"Delaying stream subscription with $delay for stream $streamConfig"
)
akka.pattern.after(delay)(Future.unit)
case None => Future.unit
}
}

View File

@ -1,125 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig.{
ActiveContractsStreamConfig,
CompletionsStreamConfig,
PartyFilter,
PartyNamePrefixFilter,
TransactionTreesStreamConfig,
TransactionsStreamConfig,
}
import com.daml.ledger.api.benchtool.submission.{AllocatedParties, BenchtoolTestsPackageInfo}
import com.daml.ledger.client.binding.Primitive.TemplateId
import com.daml.ledger.test.benchtool.Foo.{Foo1, Foo2, Foo3}
import com.daml.ledger.test.benchtool.InterfaceSubscription.{FooI1, FooI2, FooI3}
import scalaz.syntax.tag._
/** Rewrites stream configs read from the YAML file into fully-qualified form:
  * short template/interface names (e.g. "Foo1") become package-qualified identifiers,
  * short party names become the actual allocated party names, and party-name-prefix
  * filters are expanded into one filter per matching party.
  *
  * @param allocatedParties parties already allocated on the ledger
  * @param packageInfo      package id of the benchtool test templates
  */
class ConfigEnricher(
allocatedParties: AllocatedParties,
packageInfo: BenchtoolTestsPackageInfo,
) {
// Maps a template/interface id to (short entity name -> "packageId:module:entity").
private def toTemplateId[T](templateId: TemplateId[T]): (String, String) = {
val id = templateId.unwrap
id.entityName -> s"${packageInfo.packageId}:${id.moduleName}:${id.entityName}"
}
private val interfaceNameToFullyQualifiedNameMap: Map[String, String] = List(
FooI1.id,
FooI2.id,
FooI3.id,
).map(toTemplateId).toMap
private val templateNameToFullyQualifiedNameMap: Map[String, String] = List(
Foo1.id,
Foo2.id,
Foo3.id,
).map(toTemplateId).toMap
/** Returns a copy of the stream config with all names fully qualified and all
  * party-name-prefix filters expanded into concrete per-party filters.
  */
def enrichStreamConfig(
streamConfig: StreamConfig
): StreamConfig = {
streamConfig match {
case config: TransactionsStreamConfig =>
config
.copy(
filters = enrichFilters(config.filters) ++ config.partyNamePrefixFilters.flatMap(
convertFilterByPartySet
),
// Prefix filters have been expanded into `filters`, so they are cleared here.
partyNamePrefixFilters = List.empty,
)
case config: TransactionTreesStreamConfig =>
config
.copy(
filters = enrichFilters(config.filters) ++ config.partyNamePrefixFilters.flatMap(
convertFilterByPartySet
),
partyNamePrefixFilters = List.empty,
)
case config: ActiveContractsStreamConfig =>
config
.copy(
filters = enrichFilters(config.filters) ++ config.partyNamePrefixFilters.flatMap(
convertFilterByPartySet
),
partyNamePrefixFilters = List.empty,
)
case config: CompletionsStreamConfig =>
config.copy(parties = config.parties.map(party => convertParty(party)))
}
}
// Resolves a short party name to the first allocated party whose full name
// contains it; falls back to the short name when nothing matches.
// NOTE(review): substring matching means e.g. "Obs-1" also matches "Obs-10",
// so resolution relies on the ordering of allAllocatedParties — confirm intended.
private def convertParty(
partyShortName: String
): String =
allocatedParties.allAllocatedParties
.map(_.unwrap)
.find(_.contains(partyShortName))
.getOrElse(partyShortName)
/** Expands a prefix filter into one PartyFilter per allocated party matching the prefix. */
private def convertFilterByPartySet(
filter: PartyNamePrefixFilter
): List[PartyFilter] = {
val convertedTemplates = filter.templates.map(convertTemplate)
val convertedInterfaces = filter.interfaces.map(convertInterface)
val matchedParties = matchingParties(filter.partyNamePrefix)
matchedParties.map(party =>
PartyFilter(party = party, templates = convertedTemplates, interfaces = convertedInterfaces)
)
}
// Aborts the run when a configured prefix matches no allocated party: this is
// always a configuration mistake, not a valid empty filter.
private def matchingParties(partyNamePrefix: String): List[String] = {
val knownParties = allocatedParties.allAllocatedParties.map(_.unwrap)
val matchedParties = knownParties.filter(_.startsWith(partyNamePrefix))
if (matchedParties.isEmpty) {
val knownPartiesText = knownParties.mkString(", ")
sys.error(
s"Expected party name prefix: '${partyNamePrefix}' does not match any of the known parties: $knownPartiesText"
)
} else
matchedParties
}
// Fully qualifies the party, template and interface names of explicit filters.
private def enrichFilters(
filters: List[StreamConfig.PartyFilter]
): List[StreamConfig.PartyFilter] = {
filters.map { filter =>
StreamConfig.PartyFilter(
party = convertParty(filter.party),
templates = filter.templates.map(convertTemplate),
interfaces = filter.interfaces.map(convertInterface),
)
}
}
/** Short template name -> fully qualified id; unknown names pass through unchanged. */
def convertTemplate(shortTemplateName: String): String =
templateNameToFullyQualifiedNameMap.getOrElse(shortTemplateName, shortTemplateName)
/** Short interface name -> fully qualified id; unknown names pass through unchanged. */
def convertInterface(shortInterfaceName: String): String =
interfaceNameToFullyQualifiedNameMap.getOrElse(shortInterfaceName, shortInterfaceName)
}

View File

@ -1,433 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import java.util.concurrent._
import akka.actor.typed.{ActorSystem, SpawnProtocol}
import com.daml.ledger.api.benchtool.config.WorkflowConfig.{
FibonacciSubmissionConfig,
FooSubmissionConfig,
}
import com.daml.ledger.api.benchtool.config.{Config, ConfigMaker, WorkflowConfig}
import com.daml.ledger.api.benchtool.metrics.MetricsManager.NoOpMetricsManager
import com.daml.ledger.api.benchtool.metrics.{
BenchmarkResult,
LatencyMetric,
MetricRegistryOwner,
MetricsManager,
}
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.api.benchtool.submission._
import com.daml.ledger.api.benchtool.submission.foo.RandomPartySelecting
import com.daml.ledger.api.benchtool.util.TypedActorSystemResourceOwner
import com.daml.ledger.api.tls.TlsConfiguration
import com.daml.ledger.resources.{ResourceContext, ResourceOwner}
import com.daml.metrics.api.MetricHandle.MetricsFactory
import com.daml.metrics.api.opentelemetry.OpenTelemetryMetricsFactory
import com.daml.platform.localstore.api.UserManagementStore
import io.grpc.Channel
import io.grpc.netty.{NegotiationType, NettyChannelBuilder}
import io.opentelemetry.api.metrics.MeterProvider
import org.slf4j.{Logger, LoggerFactory}
import scala.annotation.nowarn
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.control.NonFatal
/** Runs a submission step followed by a benchmark step.
* Either step is optional.
*
* Uses "benchtool" ([[Names.benchtoolApplicationId]]) applicationId for both steps.
*/
object LedgerApiBenchTool {
private val printer = pprint.PPrinter.BlackWhite
private[benchtool] val logger: Logger = LoggerFactory.getLogger(getClass)
private[benchtool] def prettyPrint(x: Any): String = printer(x).toString()
/** CLI entry point: parses the configuration, runs the benchmark, and blocks
  * until it completes. Exits with code 1 on configuration errors or exceptions.
  */
def main(args: Array[String]): Unit = {
import scala.concurrent.ExecutionContext.Implicits.global
ConfigMaker.make(args) match {
case Left(error) =>
logger.error(s"Configuration error: ${error.details}")
sys.exit(1)
case Right(config) =>
logger.info(s"Starting benchmark with configuration:\n${prettyPrint(config)}")
val result = LedgerApiBenchTool(config)
.run()(ExecutionContext.Implicits.global)
.map {
case Right(()) =>
logger.info(s"Benchmark finished successfully.")
// NOTE(review): a failed benchmark (Left) is logged at INFO and still
// exits with code 0; only thrown exceptions exit with 1 — confirm intended.
case Left(error) =>
logger.info(s"Benchmark failed: $error")
}
.recover { case ex =>
logger.error(s"ledger-api-bench-tool failure: ${ex.getMessage}", ex)
sys.exit(1)
}(scala.concurrent.ExecutionContext.Implicits.global)
// Block the main thread until the benchmark pipeline finishes.
Await.result(result, atMost = Duration.Inf)
()
}
}
/** Builds a bench tool instance; token-based auth is enabled only when a
  * token secret is configured.
  */
def apply(config: Config): LedgerApiBenchTool = {
new LedgerApiBenchTool(
names = new Names,
authorizationHelper = config.authorizationTokenSecret.map(new AuthorizationHelper(_)),
config = config,
)
}
}
class LedgerApiBenchTool(
names: Names,
authorizationHelper: Option[AuthorizationHelper],
config: Config,
) {
import LedgerApiBenchTool.{logger, prettyPrint}
/** Runs the full benchmark pipeline: acquires resources (API channels, actor
  * system, metrics registry), optionally performs the submission step, then
  * runs exactly one of the latency, pruning or stream benchmarks.
  *
  * @return Right(()) on success, Left(message) when benchmark objectives were violated
  */
def run()(implicit ec: ExecutionContext): Future[Either[String, Unit]] = {
implicit val resourceContext: ResourceContext = ResourceContext(ec)
// Resource bundle: a user-id -> services factory, the actor system, and the
// meter provider; all released when `use` completes.
val resources: ResourceOwner[
(
String => LedgerApiServices,
ActorSystem[SpawnProtocol.Command],
MeterProvider,
)
] = for {
servicesForUserId <- apiServicesOwner(config, authorizationHelper)
system <- TypedActorSystemResourceOwner.owner()
meterProvider <- new MetricRegistryOwner(
reporter = config.metricsReporter,
reportingInterval = config.reportingPeriod,
logger = logger,
)
} yield (servicesForUserId, system, meterProvider)
resources.use { case (servicesForUserId, actorSystem, meterProvider) =>
// Admin services are used for party/user management; regular-user services
// for submission and streaming.
val adminServices = servicesForUserId(UserManagementStore.DefaultParticipantAdminUserId)
val regularUserServices = servicesForUserId(names.benchtoolUserId)
val metricsFactory = new OpenTelemetryMetricsFactory(
meterProvider.meterBuilder("ledger-api-bench-tool").build()
)
val partyAllocating = new PartyAllocating(
names = names,
adminServices = adminServices,
)
for {
_ <- regularUserSetupStep(adminServices)
(allocatedParties, benchtoolTestsPackageInfo) <- {
config.workflow.submission match {
case None =>
// No submission: discover the already-present parties and package id
// from the ledger instead of creating them.
logger.info("No submission config found; skipping the command submission step")
for {
allocatedParties <- SubmittedDataAnalyzing.determineAllocatedParties(
config.workflow,
partyAllocating,
)
benchtoolDamlPackageInfo <- SubmittedDataAnalyzing.determineBenchtoolTestsPackageId(
regularUserServices.packageService
)
} yield {
(allocatedParties, benchtoolDamlPackageInfo)
}
case Some(submissionConfig) =>
logger.info("Submission config found; command submission will be performed")
submissionStep(
regularUserServices = regularUserServices,
adminServices = adminServices,
submissionConfig = submissionConfig,
metricsFactory = metricsFactory,
partyAllocating = partyAllocating,
)
.map(_ -> BenchtoolTestsPackageInfo.StaticDefault)
.map { v =>
// We manually execute a 'VACUUM ANALYZE' at the end of the submission step (if IndexDB is on Postgresql),
// to make sure query planner statistics, visibility map, etc.. are all up-to-date.
config.ledger.indexDbJdbcUrlO.foreach { indexDbJdbcUrl =>
if (indexDbJdbcUrl.startsWith("jdbc:postgresql:")) {
PostgresUtils.invokeVacuumAnalyze(indexDbJdbcUrl)
}
}
v
}
}
}
// Rewrite the stream configs against the actual parties/package id.
configEnricher = new ConfigEnricher(allocatedParties, benchtoolTestsPackageInfo)
updatedStreamConfigs = config.workflow.streams.map(streamsConfig =>
configEnricher.enrichStreamConfig(streamsConfig)
)
_ = logger.info(
s"Stream configs adapted after the submission step: ${prettyPrint(updatedStreamConfigs)}"
)
// Exactly one benchmark kind runs per invocation: latency takes precedence,
// then pruning, then the default stream benchmark.
benchmarkResult <-
if (config.latencyTest) {
benchmarkLatency(
regularUserServices = regularUserServices,
adminServices = adminServices,
submissionConfigO = config.workflow.submission,
metricsFactory = metricsFactory,
allocatedParties = allocatedParties,
actorSystem = actorSystem,
maxLatencyObjectiveMillis = config.maxLatencyObjectiveMillis,
)
} else if (config.workflow.pruning.isDefined) {
new PruningBenchmark(reportingPeriod = config.reportingPeriod).benchmarkPruning(
pruningConfig =
config.workflow.pruning.getOrElse(sys.error("Pruning config not defined!")),
regularUserServices = regularUserServices,
adminServices = adminServices,
actorSystem = actorSystem,
signatory = allocatedParties.signatory,
names = names,
)
} else {
benchmarkStreams(
regularUserServices = regularUserServices,
streamConfigs = updatedStreamConfigs,
metricsFactory = metricsFactory,
actorSystem = actorSystem,
)
}
} yield benchmarkResult
}
}
/** Creates (or grants rights to) the regular benchtool user before submission.
  * Only runs when both token-based auth and a submission config are present;
  * otherwise it just logs and completes.
  */
private def regularUserSetupStep(
adminServices: LedgerApiServices
)(implicit ec: ExecutionContext): Future[Unit] =
(config.authorizationTokenSecret, config.workflow.submission) match {
case (Some(_), Some(submissionConfig)) =>
// We only need to setup the user when the UserManagementService is used and we're going to submit transactions
// The submission config is necessary to establish a set of rights that will be granted to the user.
logger.info(
s"Setting up the regular '${names.benchtoolUserId}' user prior to the submission phase."
)
adminServices.userManagementService.createUserOrGrantRightsToExisting(
userId = names.benchtoolUserId,
observerPartyNames = names.observerPartyNames(
submissionConfig.numberOfObservers,
submissionConfig.uniqueParties,
),
signatoryPartyName = names.signatoryPartyName,
)
case _ =>
// Nothing to set up; the user is assumed to exist already (or auth is off).
Future.successful(
logger.info(
s"The '${names.benchtoolUserId}' user is going to be used for authentication."
)
)
}
/** Runs the stream benchmark over the given stream configs, or succeeds
  * immediately when no streams are configured.
  */
private def benchmarkStreams(
    regularUserServices: LedgerApiServices,
    streamConfigs: List[WorkflowConfig.StreamConfig],
    @nowarn metricsFactory: MetricsFactory,
    actorSystem: ActorSystem[SpawnProtocol.Command],
)(implicit ec: ExecutionContext): Future[Either[String, Unit]] = {
  if (streamConfigs.nonEmpty) {
    // Delegate the actual streaming and objective checking to Benchmark.
    Benchmark.run(
      streamConfigs = streamConfigs,
      reportingPeriod = config.reportingPeriod,
      apiServices = regularUserServices,
      metricsFactory = metricsFactory,
      system = actorSystem,
    )
  } else {
    // Nothing to stream: report success without touching the ledger.
    logger.info(s"No streams defined. Skipping the benchmark step.")
    Future.successful(Right(()))
  }
}
/** Measures submit-and-wait latency: generates Foo commands, submits them with
  * `waitForSubmission = true`, and checks the observed latency against the
  * configured maximum objective.
  *
  * Fails the returned future when the submission config is absent or not a
  * [[FooSubmissionConfig]].
  */
private def benchmarkLatency(
regularUserServices: LedgerApiServices,
adminServices: LedgerApiServices,
submissionConfigO: Option[WorkflowConfig.SubmissionConfig],
@nowarn metricsFactory: MetricsFactory,
allocatedParties: AllocatedParties,
actorSystem: ActorSystem[SpawnProtocol.Command],
maxLatencyObjectiveMillis: Long,
)(implicit ec: ExecutionContext): Future[Either[String, Unit]] =
submissionConfigO match {
case Some(submissionConfig: FooSubmissionConfig) =>
val generator: CommandGenerator = new FooCommandGenerator(
config = submissionConfig,
divulgeesToDivulgerKeyMap = Map.empty,
names = names,
allocatedParties = allocatedParties,
partySelecting = new RandomPartySelecting(
config = submissionConfig,
allocatedParties = allocatedParties,
randomnessProvider = RandomnessProvider.Default,
),
randomnessProvider = RandomnessProvider.Default,
)
for {
// Single latency metric with the configured objective as its threshold.
metricsManager <- MetricsManager.create(
observedMetric = "submit-and-wait-latency",
logInterval = config.reportingPeriod,
metrics = List(LatencyMetric.empty(maxLatencyObjectiveMillis)),
exposedMetrics = None,
)(actorSystem, ec)
submitter = CommandSubmitter(
names = names,
benchtoolUserServices = regularUserServices,
adminServices = adminServices,
metricsFactory = metricsFactory,
metricsManager = metricsManager,
// Waiting for submission completion is what makes this a latency measurement.
waitForSubmission = true,
partyAllocating = new PartyAllocating(
names = names,
adminServices = adminServices,
),
)
result <- submitter
.generateAndSubmit(
generator = generator,
config = submissionConfig,
baseActAs = List(allocatedParties.signatory),
maxInFlightCommands = config.maxInFlightCommands,
submissionBatchSize = config.submissionBatchSize,
)
.flatMap(_ => metricsManager.result())
.map {
case BenchmarkResult.ObjectivesViolated =>
Left("Metrics objectives not met.")
case BenchmarkResult.Ok =>
Right(())
}
// Submission failures are reported as a benchmark failure, not a crash.
.recoverWith { case NonFatal(e) =>
Future.successful(Left(e.getMessage))
}
} yield result
case Some(other) =>
Future.failed(
new RuntimeException(s"Unsupported submission config for latency benchmarking: $other")
)
case None =>
Future.failed(
new RuntimeException("Submission config cannot be empty for latency benchmarking")
)
}
/** Allocates the parties required by `submissionConfig` and performs the submission
  * workload matching its concrete type (Foo or Fibonacci).
  *
  * @return the parties allocated during the preparation step
  */
def submissionStep(
    regularUserServices: LedgerApiServices,
    adminServices: LedgerApiServices,
    submissionConfig: WorkflowConfig.SubmissionConfig,
    @nowarn metricsFactory: MetricsFactory,
    partyAllocating: PartyAllocating,
)(implicit
    ec: ExecutionContext
): Future[AllocatedParties] = {
  // Shared by party allocation and the actual submission; metrics are deliberately
  // disabled (NoOpMetricsManager) for this step.
  val submitter = CommandSubmitter(
    names = names,
    benchtoolUserServices = regularUserServices,
    adminServices = adminServices,
    metricsFactory = metricsFactory,
    metricsManager = NoOpMetricsManager(),
    waitForSubmission = submissionConfig.waitForSubmission,
    partyAllocating = partyAllocating,
  )

  // Dispatches to the submission flavor matching the concrete config type.
  def submit(allocatedParties: AllocatedParties): Future[_] =
    submissionConfig match {
      case fooConfig: FooSubmissionConfig =>
        new FooSubmission(
          submitter = submitter,
          maxInFlightCommands = config.maxInFlightCommands,
          submissionBatchSize = config.submissionBatchSize,
          allocatedParties = allocatedParties,
          names = names,
          randomnessProvider = RandomnessProvider.Default,
        ).performSubmission(fooConfig)
      case fibonacciConfig: FibonacciSubmissionConfig =>
        val generator: CommandGenerator = new FibonacciCommandGenerator(
          signatory = allocatedParties.signatory,
          config = fibonacciConfig,
          names = names,
        )
        submitter
          .generateAndSubmit(
            generator = generator,
            config = fibonacciConfig,
            baseActAs = List(allocatedParties.signatory) ++ allocatedParties.divulgees,
            maxInFlightCommands = config.maxInFlightCommands,
            submissionBatchSize = config.submissionBatchSize,
          )
          .map(_ => ())
    }

  for {
    allocatedParties <- submitter.prepare(submissionConfig)
    _ <- submit(allocatedParties)
  } yield allocatedParties
}
/** Builds the Ledger API services factory: acquires the worker thread pool first,
  * then the gRPC channel running on it, and finally the per-user service factory
  * on top of that channel.
  */
private def apiServicesOwner(
    config: Config,
    authorizationHelper: Option[AuthorizationHelper],
)(implicit ec: ExecutionContext): ResourceOwner[String => LedgerApiServices] =
  threadPoolExecutorOwner(config.concurrency).flatMap { executorService =>
    channelOwner(config.ledger, config.tls, executorService).flatMap { channel =>
      ResourceOwner.forFuture(() =>
        LedgerApiServices.forChannel(
          channel = channel,
          authorizationHelper = authorizationHelper,
        )
      )
    }
  }
/** Builds a managed gRPC channel to the ledger.
  *
  * The builder starts in plaintext mode; when TLS is enabled and a client SSL
  * context can be created, the builder is switched to transport security.
  *
  * Fixes over the previous version: the value of `tls.client().map {...}` was
  * silently discarded (the builder was only mutated as a side effect inside the
  * map), and a missing SSL context fell back to plaintext without any signal.
  */
private def channelOwner(
    ledger: Config.Ledger,
    tls: TlsConfiguration,
    executor: Executor,
): ResourceOwner[Channel] = {
  logger.info(
    s"Setting up a managed channel to a ledger at: ${ledger.hostname}:${ledger.port}..."
  )
  val MessageChannelSizeBytes: Int = 32 * 1024 * 1024 // 32 MiB
  val ShutdownTimeout: FiniteDuration = 5.seconds
  // NettyChannelBuilder is mutable: subsequent calls update this builder in place.
  val channelBuilder = NettyChannelBuilder
    .forAddress(ledger.hostname, ledger.port)
    .executor(executor)
    .maxInboundMessageSize(MessageChannelSizeBytes)
    .usePlaintext()
  if (tls.enabled) {
    tls.client() match {
      case Some(sslContext) =>
        logger.info(s"Setting up a managed channel with transport security...")
        channelBuilder
          .useTransportSecurity()
          .sslContext(sslContext)
          .negotiationType(NegotiationType.TLS)
        () // discard the builder returned by the fluent calls; mutation already happened
      case None =>
        // Previously this fell through silently; make the plaintext fallback visible.
        logger.warn(
          "TLS is enabled but no client SSL context could be created; falling back to plaintext."
        )
    }
  }
  ResourceOwner.forChannel(channelBuilder, ShutdownTimeout)
}
/** Creates a managed worker thread pool sized according to the concurrency config.
  * A `maxQueueLength` of 0 selects a SynchronousQueue, i.e. hand-off semantics
  * with no task buffering.
  */
private def threadPoolExecutorOwner(
    config: Config.Concurrency
): ResourceOwner[ThreadPoolExecutor] =
  ResourceOwner.forExecutorService { () =>
    val workQueue =
      if (config.maxQueueLength == 0) new SynchronousQueue[Runnable]()
      else new ArrayBlockingQueue[Runnable](config.maxQueueLength)
    new ThreadPoolExecutor(
      config.corePoolSize,
      config.maxPoolSize,
      config.keepAliveTime,
      TimeUnit.SECONDS,
      workQueue,
    )
  }
}

View File

@ -1,69 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import java.sql.{Connection, DriverManager, Statement}
import com.daml.ledger.api.benchtool.LedgerApiBenchTool.logger
/** JDBC helpers for maintaining the IndexDB between benchmark runs. */
object PostgresUtils {

  /** Runs `VACUUM ANALYZE` on the IndexDB behind `indexDbJdbcUrl` and then logs the
    * per-table vacuum/analyze bookkeeping from `pg_stat_user_tables`.
    *
    * Fixes over the previous version: the inspection is no longer invoked from a
    * `finally` block (where a secondary failure could mask the vacuum exception),
    * and the `ResultSet` is now closed.
    */
  def invokeVacuumAnalyze(indexDbJdbcUrl: String): Unit = {
    val connection = DriverManager.getConnection(indexDbJdbcUrl)
    try {
      val stmt = connection.createStatement()
      val vacuumQuery = "VACUUM ANALYZE"
      try {
        logger.info(
          s"Executing '$vacuumQuery' on the IndexDB identified by JDBC URL: '${indexDbJdbcUrl}' ..."
        )
        stmt.executeUpdate(vacuumQuery)
        logger.info(s"Executed '$vacuumQuery'")
      } finally {
        stmt.close()
      }
      // Runs only after a successful vacuum so a vacuum failure is not masked by a
      // secondary failure of the inspection query.
      inspectVacuumAndAnalyzeState(connection)
    } finally {
      connection.close()
    }
  }

  /** Queries and logs vacuum/analyze timestamps for every user table. */
  private def inspectVacuumAndAnalyzeState(connection: Connection): Unit = {
    val query =
      "SELECT relname, last_vacuum, last_autovacuum, last_analyze, last_autoanalyze FROM pg_stat_user_tables ORDER BY relname"
    val stmt = connection.createStatement()
    try {
      logger.info("Executing SQL query: " + query)
      stmt.execute(query)
      printQueryResult(stmt)
    } finally {
      stmt.close()
    }
  }

  /** Logs every row of the statement's current result set, one line per row, with
    * `column-name value,` cells left-padded to a fixed width for alignment.
    */
  private def printQueryResult(s: Statement): Unit = {
    val rs = s.getResultSet
    try {
      val meta = rs.getMetaData
      val colCount = meta.getColumnCount
      // Single-threaded use: StringBuilder instead of the synchronized StringBuffer.
      val buffer = new StringBuilder
      while (rs.next()) {
        val text = 1
          .to(colCount)
          .map(colNumber =>
            f"${meta.getColumnName(colNumber)} ${rs.getString(colNumber) + ","}%-45s"
          )
          .mkString(" ")
        buffer.append(text).append("\n")
      }
      logger.info(buffer.toString)
    } finally {
      rs.close() // previously leaked
    }
  }
}

View File

@ -1,62 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import akka.actor.typed.{ActorSystem, SpawnProtocol}
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.metrics.{BenchmarkResult, MetricsManager, MetricsSet}
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.api.benchtool.submission.Names
import com.daml.ledger.api.v1.admin.participant_pruning_service.PruneRequest
import com.daml.ledger.client.binding.Primitive
import com.daml.ledger.test.benchtool.Foo.Dummy
import com.daml.ledger.api.v1.commands.Commands
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
/** Benchmarks participant pruning: records the total runtime of a prune call and
  * checks it against the configured max-duration objective.
  *
  * Fixes a bug in the previous version: the objectives check
  * (`metricsManager.result()`) was computed inside a discarded `.map` and the
  * method unconditionally yielded `Right(())` — objective violations were never
  * reported. The benchmark outcome is now propagated to the caller.
  */
class PruningBenchmark(reportingPeriod: FiniteDuration) {

  /** @return `Right(())` when the pruning duration objective was met, `Left(reason)` otherwise */
  def benchmarkPruning(
      signatory: Primitive.Party,
      pruningConfig: WorkflowConfig.PruningConfig,
      regularUserServices: LedgerApiServices,
      adminServices: LedgerApiServices,
      actorSystem: ActorSystem[SpawnProtocol.Command],
      names: Names,
  )(implicit ec: ExecutionContext): Future[Either[String, Unit]] = for {
    endOffset <- regularUserServices.transactionService.getLedgerEnd()
    // Submit one more command so that we're not pruning exactly at the ledger end offset
    _ <- adminServices.commandService.submitAndWait(
      Commands(
        applicationId = names.benchtoolApplicationId,
        commandId = "pruning-benchmarking-dummy-command",
        commands = Seq(Dummy(signatory).create.command),
        actAs = Seq(signatory.toString),
      )
    )
    durationMetric = MetricsSet.createTotalRuntimeMetric[Unit](pruningConfig.maxDurationObjective)
    metricsManager <- MetricsManager.create(
      observedMetric = "benchtool-pruning",
      logInterval = reportingPeriod,
      metrics = List(durationMetric),
      exposedMetrics = None,
    )(actorSystem, ec)
    _ <- adminServices.pruningService
      .prune(
        new PruneRequest(
          pruneUpTo = endOffset,
          submissionId = "benchtool-pruning",
          pruneAllDivulgedContracts = pruningConfig.pruneAllDivulgedContracts,
        )
      )
    // A single observation: the metric measures total runtime, not per-item throughput.
    _ = metricsManager.sendNewValue(())
    benchmarkResult <- metricsManager.result().map {
      case BenchmarkResult.ObjectivesViolated => Left("Metrics objectives not met.")
      case BenchmarkResult.Ok => Right(())
    }
  } yield benchmarkResult
}

View File

@ -1,92 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.services.PackageService
import com.daml.ledger.api.benchtool.submission.BenchtoolTestsPackageInfo.BenchtoolTestsPackageName
import com.daml.ledger.api.benchtool.submission.{
AllocatedParties,
BenchtoolTestsPackageInfo,
PartyAllocating,
}
import com.daml.ledger.api.v1.package_service.GetPackageResponse
import com.daml.lf.data.Ref
import com.daml.lf.language.Ast
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.{ExecutionContext, Future}
/** Contains utilities for retrieving useful facts
  * from data already submitted to a Ledger API server.
  * (The motivating use case are the benchmarks that do not perform a submission step on their own
  * and for that reason cannot statically determine these facts.)
  */
object SubmittedDataAnalyzing {

  private[benchtool] val logger: Logger = LoggerFactory.getLogger(getClass)

  /** Reconstructs [[AllocatedParties]] from parties that already exist on the ledger,
    * using the stream configs' party-set prefixes to group party sets.
    *
    * Fix: the previous version logged "Analyzing existing parties.." twice in a row.
    */
  def determineAllocatedParties(
      workflowConfig: WorkflowConfig,
      partyAllocating: PartyAllocating,
  )(implicit ec: ExecutionContext): Future[AllocatedParties] = {
    logger.info("Analyzing existing parties..")
    partyAllocating
      .lookupExistingParties()
      .map { existingParties =>
        AllocatedParties.forExistingParties(
          parties = existingParties.toList,
          partyPrefixesForPartySets =
            workflowConfig.streams.flatMap(_.partySetPrefixes.iterator).distinct,
        )
      }
  }

  /** Finds the id of the benchtool-tests Daml package among the packages uploaded to
    * the ledger; warns when multiple candidates exist and falls back to the static
    * default package info when none is found.
    */
  def determineBenchtoolTestsPackageId(
      packageService: PackageService
  )(implicit ec: ExecutionContext): Future[BenchtoolTestsPackageInfo] = {
    logger.info("Analyzing existing Daml packages..")
    for {
      packageIds: Seq[String] <- packageService.listPackages().map(_.packageIds)
      getPackageResponses: Seq[GetPackageResponse] <- Future.sequence(
        packageIds.map(packageId => packageService.getPackage(packageId = packageId))
      )
    } yield {
      // Pair each package's metadata name with its id (the archive hash).
      val packageNamesToPackageIds: Seq[(String, String)] = for {
        getPackageResponse <- getPackageResponses
      } yield {
        val packageId = getPackageResponse.hash
        val packageName = decodePackageName(
          archivePayloadBytes = getPackageResponse.archivePayload.toByteArray,
          pkgId = Ref.PackageId.assertFromString(packageId),
        )
        packageName -> packageId
      }
      val candidatesPackageIds =
        packageNamesToPackageIds.collect { case (BenchtoolTestsPackageName, pkgId) => pkgId }
      if (candidatesPackageIds.size > 1) {
        logger.warn(s"Found more than one Daml package with name '$BenchtoolTestsPackageName'")
      }
      val detectedPackageInfoO = candidatesPackageIds.headOption.map(BenchtoolTestsPackageInfo(_))
      detectedPackageInfoO.getOrElse {
        logger.info(
          s"Could not find a Daml package with name '$BenchtoolTestsPackageName'; defaulting its packageId to the static one"
        )
        BenchtoolTestsPackageInfo.StaticDefault
      }
    }
  }

  /** Decodes a package's archive payload and extracts its metadata name ("" when absent). */
  private def decodePackageName(archivePayloadBytes: Array[Byte], pkgId: Ref.PackageId): String = {
    val pkg: Ast.Package = com.daml.lf.archive
      .archivePayloadDecoder(pkgId, onlySerializableDataDefs = false)
      .assertFromByteArray(archivePayloadBytes)
      ._2
    pkg.metadata.fold[String]("")(_.name)
  }
}

View File

@ -1,464 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.config
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import com.daml.metrics.api.reporters.MetricsReporter
import scopt.{OptionDef, OptionParser, Read}
import java.io.File
import java.nio.file.Paths
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.util.{Failure, Success, Try}
/** Command-line interface of the bench tool: a scopt parser over [[Config]].
  *
  * Options mutate [[Config.Default]] via their `action`; the `note(...)` entries at
  * the bottom of the parser only add help-screen documentation for the key=value
  * parameters accepted by `--consume-stream`.
  */
object Cli {
  private val ProgramName: String = "ledger-api-bench-tool"

  private val parser: OptionParser[Config] = new OptionParser[Config](ProgramName) {
    import Reads._

    head("A tool for measuring transaction streaming performance of a ledger.")

    opt[(String, Int)]("endpoint")(endpointRead)
      .abbr("e")
      .text("Ledger API endpoint")
      .valueName("<hostname>:<port>")
      .optional()
      .action { case ((hostname, port), config) =>
        config.copy(ledger = config.ledger.copy(hostname = hostname, port = port))
      }

    opt[String]("indexdb-jdbc-url")
      .text("JDBC url to an IndexDB instance")
      .optional()
      .action { case (url, config) => config.withLedgerConfig(_.copy(indexDbJdbcUrlO = Some(url))) }

    // Repeatable: each occurrence appends one stream to the workflow.
    opt[WorkflowConfig.StreamConfig]("consume-stream")
      .abbr("s")
      .optional()
      .unbounded()
      .text(
        s"Stream configuration."
      )
      .valueName(
        "<param1>=<value1>,<param2>=<value2>,..."
      )
      .action { case (streamConfig, config) =>
        config
          .copy(workflow = config.workflow.copy(streams = config.workflow.streams :+ streamConfig))
      }

    opt[File]("workflow-config")
      .hidden() // TODO: uncomment when production-ready
      .abbr("w")
      .optional()
      .text(
        "A workflow configuration file. Parameters defined via this method take precedence over --consume-stream options."
      )
      .action { case (workflowConfigFile, config) =>
        config.copy(workflowConfigFile = Some(workflowConfigFile))
      }

    opt[Int]("max-in-flight-commands")
      .hidden() // TODO: uncomment when production-ready
      .text("Maximum in-flight commands for command submissions.")
      .optional()
      .action { case (size, config) =>
        config.copy(maxInFlightCommands = size)
      }

    opt[Unit]("latency-test")
      .text("Run a SubmitAndWait latency benchmark")
      .optional()
      .action { case (_, config) => config.copy(latencyTest = true) }

    opt[Long]("max-latency-millis")
      .text(
        "The maximum average latency allowed for latency benchmarks (in millis). Only relevant with `latency-test` enabled."
      )
      .optional()
      .action { case (maxLatencyMillis, config) =>
        config.copy(maxLatencyObjectiveMillis = maxLatencyMillis)
      }

    opt[Int]("submission-batch-size")
      .hidden() // TODO: uncomment when production-ready
      .text("Number of contracts created per command submission.")
      .optional()
      .action { case (size, config) =>
        config.copy(submissionBatchSize = size)
      }

    opt[FiniteDuration]("log-interval")
      .abbr("r")
      .text("Stream metrics log interval.")
      .action { case (period, config) => config.copy(reportingPeriod = period) }

    opt[Int]("core-pool-size")
      .text("Initial size of the worker thread pool.")
      .optional()
      .action { case (size, config) =>
        config.copy(concurrency = config.concurrency.copy(corePoolSize = size))
      }

    opt[Int]("max-pool-size")
      .text("Maximum size of the worker thread pool.")
      .optional()
      .action { case (size, config) =>
        config.copy(concurrency = config.concurrency.copy(maxPoolSize = size))
      }

    opt[MetricsReporter]("metrics-reporter")
      .optional()
      .text(s"Start a metrics reporter. ${MetricsReporter.cliHint}")
      .action((reporter, config) => config.copy(metricsReporter = reporter))

    opt[String]("user-based-authorization-secret")
      .optional()
      .text(
        "Enables user based authorization. The value is used for signing authorization tokens with HMAC256."
      )
      .action((secret, config) => config.copy(authorizationTokenSecret = Some(secret)))

    // Each of the three certificate options below implicitly enables TLS.
    opt[String]("pem")
      .optional()
      .text("TLS: The pem file to be used as the private key.")
      .validate(validatePath(_, "The file specified via --pem does not exist"))
      .action { (path, config) =>
        config.copy(tls =
          config.tls.copy(enabled = true, privateKeyFile = Some(Paths.get(path).toFile))
        )
      }

    opt[String]("crt")
      .optional()
      .text(
        s"TLS: The crt file to be used as the cert chain. Required for client authentication."
      )
      .validate(validatePath(_, "The file specified via --crt does not exist"))
      .action { (path, config) =>
        config
          .copy(tls = config.tls.copy(enabled = true, certChainFile = Some(Paths.get(path).toFile)))
      }

    opt[String]("cacrt")
      .optional()
      .text("TLS: The crt file to be used as the trusted root CA.")
      .validate(validatePath(_, "The file specified via --cacrt does not exist"))
      .action { (path, config) =>
        config.copy(tls =
          config.tls.copy(enabled = true, trustCollectionFile = Some(Paths.get(path).toFile))
        )
      }

    // allows you to enable tls without any special certs,
    // i.e., tls without client auth with the default root certs.
    // If any certificates are set tls is enabled implicitly and
    // this is redundant.
    opt[Unit]("tls")
      .optional()
      .text("TLS: Enable tls. This is redundant if --pem, --crt or --cacrt are set")
      .action { (_, config) => config.copy(tls = config.tls.copy(enabled = true)) }

    // A latency test performs its own submissions; configured streams are rejected.
    checkConfig(c =>
      if (c.latencyTest && c.workflow.streams.nonEmpty)
        Left("Latency test cannot have configured streams")
      else Right(())
    )

    /** Returns Right(()) when `path` points to a readable file, Left(message) otherwise. */
    private def validatePath(path: String, message: String): Either[String, Unit] = {
      val valid = Try(Paths.get(path).toFile.canRead).getOrElse(false)
      if (valid) Right(()) else Left(message)
    }

    help("help").text("Prints this information")

    /** Adds an indented, column-aligned documentation line to the help screen. */
    private def note(level: Int, param: String, desc: String = ""): OptionDef[Unit, Config] = {
      val paddedParam = s"${" " * level * 2}$param"
      val internalPadding = math.max(1, 50 - paddedParam.length)
      note(s"$paddedParam${" " * internalPadding}$desc")
    }

    note(0, "")
    note(0, "Stream configuration parameters:")
    note(1, "Transactions/transaction trees:")
    note(2, "stream-type=<transactions|transaction-trees>", "(required)")
    note(2, "name=<stream-name>", "Stream name used to identify results (required)")
    note(
      2,
      "filters=party1@template1@template2+party2",
      "List of per-party filters separated by the plus symbol (required)",
    )
    note(2, "begin-offset=<offset>")
    note(2, "end-offset=<offset>")
    note(2, "max-delay=<seconds>", "Max record time delay objective")
    note(2, "min-consumption-speed=<speed>", "Min consumption speed objective")
    note(2, "min-item-rate=<rate>", "Min item rate per second")
    note(2, "max-item-rate=<rate>", "Max item rate per second")
    note(1, "Active contract sets:")
    note(2, "stream-type=active-contracts", "(required)")
    note(2, "name=<stream-name>", "Stream name used to identify results (required)")
    note(
      2,
      "filters=party1@template1@template2+party2",
      "List of per-party filters separated by the plus symbol (required)",
    )
    note(2, "min-item-rate=<rate>", "Min item rate per second")
    note(2, "max-item-rate=<rate>", "Max item rate per second")
    note(1, "Command completions:")
    note(2, "stream-type=completions", "(required)")
    note(2, "name=<stream-name>", "Stream name used to identify results (required)")
    note(2, "party=<party>", "(required)")
    note(2, "begin-offset=<offset>")
    note(2, "template-ids=<id1>|<id2>")
    note(2, "min-item-rate=<rate>", "Min item rate per second")
    note(2, "max-item-rate=<rate>", "Max item rate per second")
  }

  /** Parses `args` into a [[Config]]; None when parsing failed (scopt reports the error). */
  def config(args: Array[String]): Option[Config] =
    parser.parse(args, Config.Default)

  /** scopt `Read` instances and key=value decoding helpers for stream configurations. */
  private object Reads {
    implicit val streamConfigRead: Read[WorkflowConfig.StreamConfig] =
      Read.mapRead[String, String].map { m =>
        // Field accessors over the parsed key=value map. All return Either so the
        // per-stream-type builders below can compose them in for-comprehensions.
        def stringField(fieldName: String): Either[String, String] =
          m.get(fieldName) match {
            case Some(value) => Right(value)
            case None => Left(s"Missing field: '$fieldName'")
          }

        def optionalStringField(fieldName: String): Either[String, Option[String]] =
          Right(m.get(fieldName))

        def optionalLongField(fieldName: String): Either[String, Option[Long]] =
          optionalField[Long](fieldName, _.toLong)

        def optionalDoubleField(fieldName: String): Either[String, Option[Double]] =
          optionalField[Double](fieldName, _.toDouble)

        def optionalScalaDurationField(fieldName: String): Either[String, Option[FiniteDuration]] =
          optionalField[String](fieldName, identity).flatMap {
            case Some(value) =>
              Duration(value) match {
                case infinite: Duration.Infinite =>
                  Left(s"Subscription delay duration must be finite, but got $infinite")
                case finiteDuration: FiniteDuration => Right(Some(finiteDuration))
              }
            case None => Right(None)
          }

        def optionalField[T](fieldName: String, f: String => T): Either[String, Option[T]] = {
          Try(m.get(fieldName).map(f)) match {
            case Success(value) => Right(value)
            case Failure(_) => Left(s"Invalid value for field name: $fieldName")
          }
        }

        // Maps the literals "ledger-begin"/"ledger-end" to boundary offsets;
        // any other string is treated as an absolute offset.
        def offset(stringValue: String): LedgerOffset =
          stringValue match {
            case "ledger-begin" =>
              LedgerOffset.defaultInstance.withBoundary(LedgerOffset.LedgerBoundary.LEDGER_BEGIN)
            case "ledger-end" =>
              LedgerOffset.defaultInstance.withBoundary(LedgerOffset.LedgerBoundary.LEDGER_END)
            case _ =>
              LedgerOffset.defaultInstance.withAbsolute(stringValue)
          }

        // None when no objective parameter was supplied at all.
        def transactionObjectives(
            maxDelaySeconds: Option[Long],
            minConsumptionSpeed: Option[Double],
            minItemRate: Option[Double],
            maxItemRate: Option[Double],
        ): Option[WorkflowConfig.StreamConfig.TransactionObjectives] =
          (maxDelaySeconds, minConsumptionSpeed, minItemRate, maxItemRate) match {
            case (None, None, None, None) => None
            case _ =>
              Some(
                WorkflowConfig.StreamConfig.TransactionObjectives(
                  maxDelaySeconds = maxDelaySeconds,
                  minConsumptionSpeed = minConsumptionSpeed,
                  minItemRate = minItemRate,
                  maxItemRate = maxItemRate,
                  // NOTE: Unsupported on CLI
                  maxTotalStreamRuntimeDuration = None,
                )
              )
          }

        def transactionsConfig
            : Either[String, WorkflowConfig.StreamConfig.TransactionsStreamConfig] = for {
          name <- stringField("name")
          filters <- stringField("filters").flatMap(parseFilters)
          beginOffset <- optionalStringField("begin-offset").map(_.map(offset))
          endOffset <- optionalStringField("end-offset").map(_.map(offset))
          maxDelaySeconds <- optionalLongField("max-delay")
          minConsumptionSpeed <- optionalDoubleField("min-consumption-speed")
          minItemRate <- optionalDoubleField("min-item-rate")
          maxItemRate <- optionalDoubleField("max-item-rate")
          maxItemCount <- optionalLongField("max-item-count")
          timeoutO <- optionalScalaDurationField("timeout")
          subscriptionDelayO <- optionalScalaDurationField("subscription-delay")
        } yield WorkflowConfig.StreamConfig.TransactionsStreamConfig(
          name = name,
          filters = filters,
          beginOffset = beginOffset,
          endOffset = endOffset,
          objectives =
            transactionObjectives(maxDelaySeconds, minConsumptionSpeed, minItemRate, maxItemRate),
          timeoutO = timeoutO,
          maxItemCount = maxItemCount,
          // NOTE: Unsupported on CLI
          partyNamePrefixFilters = List.empty,
          subscriptionDelay = subscriptionDelayO,
        )

        def transactionTreesConfig
            : Either[String, WorkflowConfig.StreamConfig.TransactionTreesStreamConfig] =
          for {
            name <- stringField("name")
            filters <- stringField("filters").flatMap(parseFilters)
            beginOffset <- optionalStringField("begin-offset").map(_.map(offset))
            endOffset <- optionalStringField("end-offset").map(_.map(offset))
            maxDelaySeconds <- optionalLongField("max-delay")
            minConsumptionSpeed <- optionalDoubleField("min-consumption-speed")
            minItemRate <- optionalDoubleField("min-item-rate")
            maxItemRate <- optionalDoubleField("max-item-rate")
            maxItemCount <- optionalLongField("max-item-count")
            timeoutO <- optionalScalaDurationField("timeout")
            subscriptionDelayO <- optionalScalaDurationField("subscription-delay")
          } yield WorkflowConfig.StreamConfig.TransactionTreesStreamConfig(
            name = name,
            filters = filters,
            beginOffset = beginOffset,
            endOffset = endOffset,
            objectives =
              transactionObjectives(maxDelaySeconds, minConsumptionSpeed, minItemRate, maxItemRate),
            timeoutO = timeoutO,
            maxItemCount = maxItemCount,
            // NOTE: Unsupported on CLI
            partyNamePrefixFilters = List.empty,
            subscriptionDelay = subscriptionDelayO,
          )

        def rateObjectives(
            minItemRate: Option[Double],
            maxItemRate: Option[Double],
        ): Option[WorkflowConfig.StreamConfig.AcsAndCompletionsObjectives] =
          (minItemRate, maxItemRate) match {
            case (None, None) => None
            case _ =>
              Some(
                WorkflowConfig.StreamConfig.AcsAndCompletionsObjectives(
                  minItemRate = minItemRate,
                  maxItemRate = maxItemRate,
                  // NOTE: Unsupported on CLI
                  maxTotalStreamRuntimeDuration = None,
                )
              )
          }

        def activeContractsConfig
            : Either[String, WorkflowConfig.StreamConfig.ActiveContractsStreamConfig] = for {
          name <- stringField("name")
          filters <- stringField("filters").flatMap(parseFilters)
          minItemRate <- optionalDoubleField("min-item-rate")
          maxItemRate <- optionalDoubleField("max-item-rate")
          maxItemCount <- optionalLongField("max-item-count")
          timeout <- optionalScalaDurationField("timeout")
          subscriptionDelayO <- optionalScalaDurationField("subscription-delay")
        } yield WorkflowConfig.StreamConfig.ActiveContractsStreamConfig(
          name = name,
          filters = filters,
          objectives = rateObjectives(minItemRate, maxItemRate),
          timeoutO = timeout,
          maxItemCount = maxItemCount,
          // NOTE: Unsupported on CLI
          partyNamePrefixFilters = List.empty,
          subscriptionDelay = subscriptionDelayO,
        )

        def completionsConfig: Either[String, WorkflowConfig.StreamConfig.CompletionsStreamConfig] =
          for {
            name <- stringField("name")
            parties <- stringField("parties").map(parseParties)
            applicationId <- stringField("application-id")
            beginOffset <- optionalStringField("begin-offset").map(_.map(offset))
            minItemRate <- optionalDoubleField("min-item-rate")
            maxItemRate <- optionalDoubleField("max-item-rate")
            timeoutO <- optionalScalaDurationField("timeout")
            maxItemCount <- optionalLongField("max-item-count")
            subscriptionDelayO <- optionalScalaDurationField("subscription-delay")
          } yield WorkflowConfig.StreamConfig.CompletionsStreamConfig(
            name = name,
            parties = parties,
            applicationId = applicationId,
            beginOffset = beginOffset,
            objectives = rateObjectives(minItemRate, maxItemRate),
            timeoutO = timeoutO,
            maxItemCount = maxItemCount,
            subscriptionDelay = subscriptionDelayO,
          )

        val config = stringField("stream-type").flatMap[String, WorkflowConfig.StreamConfig] {
          case "transactions" => transactionsConfig
          case "transaction-trees" => transactionTreesConfig
          case "active-contracts" => activeContractsConfig
          case "completions" => completionsConfig
          case invalid => Left(s"Invalid stream type: $invalid")
        }

        // scopt's Read cannot express Either, so decoding failures surface as exceptions.
        config.fold(error => throw new IllegalArgumentException(error), identity)
      }

    // Parse strings like: "", "party1" or "party1+party2+party3"
    private def parseParties(raw: String): List[String] =
      raw.split('+').toList

    /** Parses a '+'-separated list of per-party filters, failing on the first invalid entry. */
    private def parseFilters(
        listOfIds: String
    ): Either[String, List[WorkflowConfig.StreamConfig.PartyFilter]] =
      listOfIds
        .split('+')
        .toList
        .map(parseFilter)
        .foldLeft[Either[String, List[WorkflowConfig.StreamConfig.PartyFilter]]](
          Right(List.empty)
        ) { case (acc, next) =>
          for {
            filters <- acc
            filter <- next
          } yield filters :+ filter
        }

    /** Parses "party@template1@template2" into a party filter with its template ids. */
    private def parseFilter(
        filterString: String
    ): Either[String, WorkflowConfig.StreamConfig.PartyFilter] = {
      filterString
        .split('@')
        .toList match {
        case party :: templates =>
          Right(
            WorkflowConfig.StreamConfig.PartyFilter(party, templates, List.empty)
          ) // Interfaces are not supported via Cli
        case _ => Left("Filter cannot be empty")
      }
    }

    /** Reads "<host>:<port>" into a (hostname, port) pair. */
    def endpointRead: Read[(String, Int)] = new Read[(String, Int)] {
      val arity = 1
      val reads: String => (String, Int) = { s: String =>
        splitAddress(s) match {
          case (k, v) => Read.stringRead.reads(k) -> Read.intRead.reads(v)
        }
      }
    }

    // Splits at the first ':'; both halves are returned unparsed.
    private def splitAddress(s: String): (String, String) =
      s.indexOf(':') match {
        case -1 =>
          throw new IllegalArgumentException("Addresses should be specified as `<host>:<port>`")
        case n: Int => (s.slice(0, n), s.slice(n + 1, s.length))
      }
  }
}

View File

@ -1,65 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.config
import com.daml.ledger.api.tls.TlsConfiguration
import java.io.File
import com.daml.metrics.api.reporters.MetricsReporter
import scala.concurrent.duration._
/** Top-level, fully-resolved configuration of the bench tool.
  *
  * @param ledger Ledger API endpoint coordinates plus an optional IndexDB JDBC URL
  * @param concurrency sizing of the worker thread pool
  * @param tls TLS settings used when connecting to the ledger
  * @param workflow submission/stream/pruning phases to execute
  * @param reportingPeriod interval at which stream metrics are logged
  * @param workflowConfigFile optional YAML file whose workflow section overrides the CLI one
  * @param maxInFlightCommands maximum in-flight commands for command submissions
  * @param submissionBatchSize number of contracts created per command submission
  * @param metricsReporter target for the metrics reporter
  * @param authorizationTokenSecret HMAC256 secret; enables user-based authorization when set
  * @param latencyTest when true, runs the SubmitAndWait latency benchmark instead of streams
  * @param maxLatencyObjectiveMillis max average latency objective for the latency test (millis)
  */
case class Config(
    ledger: Config.Ledger,
    concurrency: Config.Concurrency,
    tls: TlsConfiguration,
    workflow: WorkflowConfig,
    reportingPeriod: FiniteDuration,
    workflowConfigFile: Option[File],
    maxInFlightCommands: Int,
    submissionBatchSize: Int,
    metricsReporter: MetricsReporter,
    authorizationTokenSecret: Option[String],
    latencyTest: Boolean,
    maxLatencyObjectiveMillis: Long,
) {
  /** Returns a copy with the ledger sub-config transformed by `f`. */
  def withLedgerConfig(f: Config.Ledger => Config.Ledger): Config = copy(ledger = f(ledger))
}

object Config {

  /** Coordinates of the Ledger API endpoint. */
  case class Ledger(
      hostname: String,
      port: Int,
      indexDbJdbcUrlO: Option[String] = None,
  )

  /** Worker thread-pool sizing; keepAliveTime is in seconds (see the executor setup),
    * and maxQueueLength == 0 selects a synchronous (hand-off) queue.
    */
  case class Concurrency(
      corePoolSize: Int,
      maxPoolSize: Int,
      keepAliveTime: Long,
      maxQueueLength: Int,
  )

  /** Defaults applied when an option is not provided on the command line. */
  val Default: Config =
    Config(
      ledger = Config.Ledger(
        hostname = "localhost",
        port = 6865,
      ),
      concurrency = Config.Concurrency(
        corePoolSize = 2,
        maxPoolSize = 8,
        keepAliveTime = 30,
        maxQueueLength = 10000,
      ),
      tls = TlsConfiguration.Empty.copy(enabled = false),
      workflow = WorkflowConfig(),
      reportingPeriod = 5.seconds,
      workflowConfigFile = None,
      maxInFlightCommands = 100,
      submissionBatchSize = 100,
      metricsReporter = MetricsReporter.Console,
      authorizationTokenSecret = None,
      latencyTest = false,
      maxLatencyObjectiveMillis = 1000L,
    )
}

View File

@ -1,45 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.config
import com.daml.ledger.api.benchtool.util.SimpleFileReader
import java.io.File
import scala.util.{Failure, Success}
/** Builds the effective tool configuration from CLI arguments, optionally replacing
  * the workflow section with the contents of a workflow YAML file (the file takes
  * precedence over `--consume-stream` options).
  */
object ConfigMaker {

  def make(args: Array[String]): Either[ConfigurationError, Config] = {

    def parseCli: Either[ConfigurationError, Config] =
      Cli.config(args).toRight(ConfigurationError("Invalid CLI arguments."))

    // Reads and parses the YAML workflow file, mapping both I/O and parse failures
    // to ConfigurationError.
    def parseWorkflowConfig(workflowConfigFile: File): Either[ConfigurationError, WorkflowConfig] =
      SimpleFileReader.readFile(workflowConfigFile)(WorkflowConfigParser.parse) match {
        case Failure(ex) =>
          Left(ConfigurationError(s"Workflow config reading error: ${ex.getLocalizedMessage}"))
        case Success(result) =>
          result.left.map(parserError =>
            ConfigurationError(s"Workflow config parsing error: ${parserError.details}")
          )
      }

    parseCli.flatMap { config =>
      val workflowE = config.workflowConfigFile
        .fold[Either[ConfigurationError, WorkflowConfig]](Right(config.workflow))(
          parseWorkflowConfig
        )
      // Workflow defined in the YAML file takes precedence over CLI params
      workflowE.map(workflow => config.copy(workflow = workflow))
    }
  }

  case class ConfigurationError(details: String)
}

View File

@ -1,210 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.config
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FooSubmissionConfig.{
ConsumingExercises,
NonconsumingExercises,
}
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig.PartyNamePrefixFilter
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import scala.concurrent.duration.FiniteDuration
/** Describes a complete benchmark workflow: an optional submission phase, a list of
  * streams to consume, and an optional pruning phase.
  */
case class WorkflowConfig(
    submission: Option[WorkflowConfig.SubmissionConfig] = None,
    streams: List[WorkflowConfig.StreamConfig] = Nil,
    pruning: Option[WorkflowConfig.PruningConfig] = None,
)
object WorkflowConfig {
/** Common parameters of every submission-phase configuration. */
sealed trait SubmissionConfig extends Product with Serializable {
  def numberOfInstances: Int
  def numberOfObservers: Int
  def numberOfDivulgees: Int
  def numberOfExtraSubmitters: Int
  // NOTE(review): presumably controls whether party names are made unique per run — confirm
  def uniqueParties: Boolean
  // When true, submissions are awaited (see e.g. CommandSubmitter's waitForSubmission).
  def waitForSubmission: Boolean
  def observerPartySets: List[FooSubmissionConfig.PartySet]
}
final case class FibonacciSubmissionConfig(
numberOfInstances: Int,
uniqueParties: Boolean,
value: Int,
waitForSubmission: Boolean,
) extends SubmissionConfig {
override val numberOfObservers = 0
override val numberOfDivulgees = 0
override val numberOfExtraSubmitters = 0
override val observerPartySets: List[FooSubmissionConfig.PartySet] = List.empty
}
final case class FooSubmissionConfig(
numberOfInstances: Int,
numberOfObservers: Int,
uniqueParties: Boolean,
instanceDistribution: List[FooSubmissionConfig.ContractDescription],
numberOfDivulgees: Int = 0,
numberOfExtraSubmitters: Int = 0,
nonConsumingExercises: Option[NonconsumingExercises] = None,
consumingExercises: Option[ConsumingExercises] = None,
applicationIds: List[FooSubmissionConfig.ApplicationId] = List.empty,
maybeWaitForSubmission: Option[Boolean] = None,
observerPartySets: List[FooSubmissionConfig.PartySet] = List.empty,
allowNonTransientContracts: Boolean = false,
) extends SubmissionConfig {
def waitForSubmission: Boolean = maybeWaitForSubmission.getOrElse(true)
}
object FooSubmissionConfig {
/** @param partyNamePrefix prefix of each party in this party set; also serves as its identifier
* @param count number of parties to create
* @param visibility a fraction of contracts that each of the parties from this set should see
*/
final case class PartySet(
partyNamePrefix: String,
count: Int,
visibility: Double,
)
case class ContractDescription(
template: String,
weight: Int,
payloadSizeBytes: Int,
)
case class NonconsumingExercises(
probability: Double,
payloadSizeBytes: Int,
)
case class ConsumingExercises(
probability: Double,
payloadSizeBytes: Int,
)
final case class ApplicationId(
applicationId: String,
weight: Int,
)
}
case class PruningConfig(
name: String,
pruneAllDivulgedContracts: Boolean,
maxDurationObjective: FiniteDuration,
)
sealed trait StreamConfig extends Product with Serializable {
def name: String
/** If specified, used to cancel the stream when enough items has been seen.
*/
def maxItemCount: Option[Long] = None
/** If specified, used to cancel the stream after the specified time out
*/
def timeoutO: Option[FiniteDuration] = None
def partySetPrefixes: List[String]
def partyNamePrefixFilters: List[PartyNamePrefixFilter]
def subscriptionDelay: Option[FiniteDuration]
}
object StreamConfig {
final case class PartyFilter(
party: String,
templates: List[String] = List.empty,
interfaces: List[String] = List.empty,
)
final case class PartyNamePrefixFilter(
partyNamePrefix: String,
templates: List[String] = List.empty,
interfaces: List[String] = List.empty,
)
final case class TransactionsStreamConfig(
name: String,
filters: List[PartyFilter] = List.empty,
partyNamePrefixFilters: List[PartyNamePrefixFilter] = List.empty,
beginOffset: Option[LedgerOffset] = None,
endOffset: Option[LedgerOffset] = None,
objectives: Option[StreamConfig.TransactionObjectives] = None,
subscriptionDelay: Option[FiniteDuration] = None,
override val maxItemCount: Option[Long] = None,
override val timeoutO: Option[FiniteDuration] = None,
) extends StreamConfig {
override def partySetPrefixes: List[String] = partyNamePrefixFilters.map(_.partyNamePrefix)
}
final case class TransactionTreesStreamConfig(
name: String,
filters: List[PartyFilter],
partyNamePrefixFilters: List[PartyNamePrefixFilter] = List.empty,
beginOffset: Option[LedgerOffset] = None,
endOffset: Option[LedgerOffset] = None,
objectives: Option[StreamConfig.TransactionObjectives] = None,
subscriptionDelay: Option[FiniteDuration] = None,
override val maxItemCount: Option[Long] = None,
override val timeoutO: Option[FiniteDuration] = None,
) extends StreamConfig {
override def partySetPrefixes: List[String] =
partyNamePrefixFilters.map(_.partyNamePrefix)
}
final case class ActiveContractsStreamConfig(
name: String,
filters: List[PartyFilter],
partyNamePrefixFilters: List[PartyNamePrefixFilter] = List.empty,
objectives: Option[StreamConfig.AcsAndCompletionsObjectives] = None,
subscriptionDelay: Option[FiniteDuration] = None,
override val maxItemCount: Option[Long] = None,
override val timeoutO: Option[FiniteDuration] = None,
) extends StreamConfig {
override def partySetPrefixes: List[String] =
partyNamePrefixFilters.map(_.partyNamePrefix)
}
final case class CompletionsStreamConfig(
name: String,
parties: List[String],
applicationId: String,
beginOffset: Option[LedgerOffset],
objectives: Option[StreamConfig.AcsAndCompletionsObjectives],
subscriptionDelay: Option[FiniteDuration] = None,
override val maxItemCount: Option[Long],
override val timeoutO: Option[FiniteDuration],
) extends StreamConfig {
override def partySetPrefixes: List[String] = List.empty
override def partyNamePrefixFilters: List[PartyNamePrefixFilter] = List.empty
}
trait CommonObjectivesConfig {
def maxTotalStreamRuntimeDuration: Option[FiniteDuration]
def minItemRate: Option[Double]
def maxItemRate: Option[Double]
}
case class TransactionObjectives(
maxDelaySeconds: Option[Long],
minConsumptionSpeed: Option[Double],
override val minItemRate: Option[Double],
override val maxItemRate: Option[Double],
override val maxTotalStreamRuntimeDuration: Option[FiniteDuration] = None,
) extends CommonObjectivesConfig
case class AcsAndCompletionsObjectives(
override val minItemRate: Option[Double],
override val maxItemRate: Option[Double],
override val maxTotalStreamRuntimeDuration: Option[FiniteDuration] = None,
) extends CommonObjectivesConfig
}
}

View File

@ -1,362 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.config
import cats.syntax.functor._
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FooSubmissionConfig.{
ConsumingExercises,
NonconsumingExercises,
}
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig.{
PartyFilter,
PartyNamePrefixFilter,
}
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import io.circe.yaml.Parser
import io.circe.{Decoder, HCursor}
import java.io.Reader
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.util.{Failure, Success, Try}
/** Parses a YAML benchmark workflow definition into a [[WorkflowConfig]]. */
object WorkflowConfigParser {
  import Decoders._
  import WorkflowConfig._

  /** Parses YAML read from `reader`; both YAML syntax errors and decoding
    * errors are reported as a [[ParserError]].
    */
  def parse(reader: Reader): Either[ParserError, WorkflowConfig] =
    Parser.default
      .parse(reader)
      .flatMap(_.as[WorkflowConfig])
      .left
      .map(error => ParserError(error.getLocalizedMessage))

  /** A human-readable description of a parse or decode failure. */
  case class ParserError(details: String)

  /** Circe decoders for the snake_case YAML representation of a workflow. */
  object Decoders {
    // Durations must be finite; infinite Scala durations are rejected.
    // NOTE(review): this decoder is reused for timeouts and pruning objectives,
    // not only subscription delays, so the error message below is narrower than
    // its actual usage — kept as-is to preserve existing output.
    implicit val scalaDurationDecoder: Decoder[FiniteDuration] =
      Decoder.decodeString.emapTry(strDuration =>
        Try(Duration(strDuration)).flatMap {
          case infinite: Duration.Infinite =>
            Failure(
              new IllegalArgumentException(
                s"Subscription delay duration must be finite, but got $infinite"
              )
            )
          case duration: FiniteDuration => Success(duration)
        }
      )

    implicit val transactionObjectivesDecoder: Decoder[StreamConfig.TransactionObjectives] =
      Decoder.forProduct5(
        "max_delay_seconds",
        "min_consumption_speed",
        "min_item_rate",
        "max_item_rate",
        "max_stream_duration",
      )(StreamConfig.TransactionObjectives.apply)

    implicit val rateObjectivesDecoder: Decoder[StreamConfig.AcsAndCompletionsObjectives] =
      Decoder.forProduct3(
        "min_item_rate",
        "max_item_rate",
        "max_stream_duration",
      )(StreamConfig.AcsAndCompletionsObjectives.apply)

    // "ledger-begin"/"ledger-end" map to boundary offsets; anything else is
    // treated as an absolute offset.
    implicit val offsetDecoder: Decoder[LedgerOffset] = {
      Decoder.decodeString.map {
        case "ledger-begin" => LedgerOffset().withBoundary(LedgerOffset.LedgerBoundary.LEDGER_BEGIN)
        case "ledger-end" => LedgerOffset().withBoundary(LedgerOffset.LedgerBoundary.LEDGER_END)
        case absolute => LedgerOffset.defaultInstance.withAbsolute(absolute)
      }
    }

    implicit val partyFilterDecoder: Decoder[StreamConfig.PartyFilter] =
      (c: HCursor) => {
        for {
          party <- c.downField("party").as[String]
          templates <- c.downField("templates").as[Option[List[String]]]
          interfaces <- c.downField("interfaces").as[Option[List[String]]]
        } yield StreamConfig.PartyFilter(
          party,
          templates.getOrElse(List.empty),
          interfaces.getOrElse(List.empty),
        )
      }

    implicit val partySetTemplateFilterDecoder: Decoder[StreamConfig.PartyNamePrefixFilter] =
      (c: HCursor) => {
        for {
          partyNamePrefix <- c.downField("party_name_prefix").as[String]
          templates <- c.downField("templates").as[Option[List[String]]]
          interfaces <- c.downField("interfaces").as[Option[List[String]]]
        } yield StreamConfig.PartyNamePrefixFilter(
          partyNamePrefix,
          templates.getOrElse(List.empty),
          interfaces.getOrElse(List.empty),
        )
      }

    implicit val transactionStreamDecoder: Decoder[StreamConfig.TransactionsStreamConfig] =
      (c: HCursor) => {
        for {
          name <- c.downField("name").as[String]
          filters <- c.downField("filters").as[Option[List[PartyFilter]]]
          beginOffset <- c.downField("begin_offset").as[Option[LedgerOffset]]
          endOffset <- c.downField("end_offset").as[Option[LedgerOffset]]
          partyNamePrefixFilters <- c
            .downField("party_prefix_filters")
            .as[Option[List[PartyNamePrefixFilter]]]
          objectives <- c.downField("objectives").as[Option[StreamConfig.TransactionObjectives]]
          subscriptionDelay <- c
            .downField("subscription_delay")
            .as[Option[FiniteDuration]]
          maxItemCount <- c.downField("max_item_count").as[Option[Long]]
          timeout <- c
            .downField("timeout")
            .as[Option[FiniteDuration]]
        } yield StreamConfig.TransactionsStreamConfig(
          name = name,
          filters = filters.getOrElse(List.empty),
          partyNamePrefixFilters = partyNamePrefixFilters.getOrElse(List.empty),
          beginOffset = beginOffset,
          endOffset = endOffset,
          objectives = objectives,
          subscriptionDelay = subscriptionDelay,
          maxItemCount = maxItemCount,
          timeoutO = timeout,
        )
      }

    implicit val transactionTreesStreamDecoder: Decoder[StreamConfig.TransactionTreesStreamConfig] =
      (c: HCursor) => {
        for {
          name <- c.downField("name").as[String]
          filters <- c.downField("filters").as[Option[List[PartyFilter]]]
          beginOffset <- c.downField("begin_offset").as[Option[LedgerOffset]]
          endOffset <- c.downField("end_offset").as[Option[LedgerOffset]]
          partyNamePrefixFilters <- c
            .downField("party_prefix_filters")
            .as[Option[List[PartyNamePrefixFilter]]]
          objectives <- c.downField("objectives").as[Option[StreamConfig.TransactionObjectives]]
          subscriptionDelay <- c
            .downField("subscription_delay")
            .as[Option[FiniteDuration]]
          maxItemCount <- c.downField("max_item_count").as[Option[Long]]
          timeout <- c
            .downField("timeout")
            .as[Option[FiniteDuration]]
        } yield StreamConfig.TransactionTreesStreamConfig(
          name = name,
          filters = filters.getOrElse(List.empty),
          partyNamePrefixFilters = partyNamePrefixFilters.getOrElse(List.empty),
          beginOffset = beginOffset,
          endOffset = endOffset,
          objectives = objectives,
          subscriptionDelay = subscriptionDelay,
          maxItemCount = maxItemCount,
          timeoutO = timeout,
        )
      }

    // Fix: removed a leftover, discarded `Decoder.forProduct7(...)` expression
    // that followed this definition; its result was never assigned or used.
    implicit val activeContractsStreamDecoder: Decoder[StreamConfig.ActiveContractsStreamConfig] =
      (c: HCursor) => {
        for {
          name <- c.downField("name").as[String]
          filters <- c.downField("filters").as[Option[List[PartyFilter]]]
          partyNamePrefixFilters <- c
            .downField("party_prefix_filters")
            .as[Option[List[PartyNamePrefixFilter]]]
          objectives <- c
            .downField("objectives")
            .as[Option[StreamConfig.AcsAndCompletionsObjectives]]
          subscriptionDelay <- c
            .downField("subscription_delay")
            .as[Option[FiniteDuration]]
          maxItemCount <- c.downField("max_item_count").as[Option[Long]]
          timeout <- c
            .downField("timeout")
            .as[Option[FiniteDuration]]
        } yield StreamConfig.ActiveContractsStreamConfig(
          name = name,
          filters = filters.getOrElse(List.empty),
          partyNamePrefixFilters = partyNamePrefixFilters.getOrElse(List.empty),
          objectives = objectives,
          subscriptionDelay = subscriptionDelay,
          maxItemCount = maxItemCount,
          timeoutO = timeout,
        )
      }

    implicit val completionsStreamDecoder: Decoder[StreamConfig.CompletionsStreamConfig] =
      (c: HCursor) => {
        for {
          name <- c.downField("name").as[String]
          parties <- c.downField("parties").as[List[String]]
          applicationId <- c.downField("application_id").as[String]
          beginOffset <- c.downField("begin_offset").as[Option[LedgerOffset]]
          objectives <- c
            .downField("objectives")
            .as[Option[StreamConfig.AcsAndCompletionsObjectives]]
          subscriptionDelay <- c
            .downField("subscription_delay")
            .as[Option[FiniteDuration]]
          maxItemCount <- c.downField("max_item_count").as[Option[Long]]
          timeout <- c
            .downField("timeout")
            .as[Option[FiniteDuration]]
        } yield StreamConfig.CompletionsStreamConfig(
          name = name,
          parties = parties,
          applicationId = applicationId,
          beginOffset = beginOffset,
          objectives = objectives,
          subscriptionDelay = subscriptionDelay,
          maxItemCount = maxItemCount,
          timeoutO = timeout,
        )
      }

    // Dispatch on the "type" discriminator field.
    implicit val streamConfigDecoder: Decoder[StreamConfig] =
      Decoder
        .forProduct1[String, String]("type")(identity)
        .flatMap[StreamConfig] {
          case "transactions" => transactionStreamDecoder.widen
          case "transaction-trees" => transactionTreesStreamDecoder.widen
          case "active-contracts" => activeContractsStreamDecoder.widen
          case "completions" => completionsStreamDecoder.widen
          case invalid => Decoder.failedWithMessage(s"Invalid stream type: $invalid")
        }

    implicit val contractDescriptionDecoder: Decoder[FooSubmissionConfig.ContractDescription] =
      Decoder.forProduct3(
        "template",
        "weight",
        "payload_size_bytes",
      )(FooSubmissionConfig.ContractDescription.apply)

    implicit val nonconsumingExercisesDecoder: Decoder[FooSubmissionConfig.NonconsumingExercises] =
      Decoder.forProduct2(
        "probability",
        "payload_size_bytes",
      )(FooSubmissionConfig.NonconsumingExercises.apply)

    implicit val consumingExercisesDecoder: Decoder[FooSubmissionConfig.ConsumingExercises] =
      Decoder.forProduct2(
        "probability",
        "payload_size_bytes",
      )(FooSubmissionConfig.ConsumingExercises.apply)

    implicit val applicationIdConfigDecoder: Decoder[FooSubmissionConfig.ApplicationId] =
      Decoder.forProduct2(
        "id",
        "weight",
      )(FooSubmissionConfig.ApplicationId.apply)

    implicit val partySetDecoder: Decoder[FooSubmissionConfig.PartySet] =
      Decoder.forProduct3(
        "party_name_prefix",
        "count",
        "visibility",
      )(FooSubmissionConfig.PartySet.apply)

    implicit val fooSubmissionConfigDecoder: Decoder[FooSubmissionConfig] =
      (c: HCursor) => {
        for {
          allowNonTransientContracts <- c
            .downField("allow_non_transient_contracts")
            .as[Option[Boolean]]
          numInstances <- c.downField("num_instances").as[Int]
          numObservers <- c.downField("num_observers").as[Int]
          uniqueObservers <- c.downField("unique_parties").as[Boolean]
          instancesDistribution <- c
            .downField("instance_distribution")
            .as[List[FooSubmissionConfig.ContractDescription]]
          numberOfDivulgees <- c.downField("num_divulgees").as[Option[Int]]
          numberOfExtraSubmitters <- c.downField("num_extra_submitters").as[Option[Int]]
          nonConsumingExercises <- c
            .downField("nonconsuming_exercises")
            .as[Option[NonconsumingExercises]]
          consumingExercises <- c.downField("consuming_exercises").as[Option[ConsumingExercises]]
          applicationIds <- c
            .downField("application_ids")
            .as[Option[List[FooSubmissionConfig.ApplicationId]]]
          maybeWaitForSubmission <- c.downField("wait_for_submission").as[Option[Boolean]]
          observerPartySets <- c
            .downField("observers_party_sets")
            .as[Option[List[FooSubmissionConfig.PartySet]]]
        } yield FooSubmissionConfig(
          allowNonTransientContracts = allowNonTransientContracts.getOrElse(false),
          numberOfInstances = numInstances,
          numberOfObservers = numObservers,
          uniqueParties = uniqueObservers,
          instanceDistribution = instancesDistribution,
          numberOfDivulgees = numberOfDivulgees.getOrElse(0),
          numberOfExtraSubmitters = numberOfExtraSubmitters.getOrElse(0),
          nonConsumingExercises = nonConsumingExercises,
          consumingExercises = consumingExercises,
          applicationIds = applicationIds.getOrElse(List.empty),
          maybeWaitForSubmission = maybeWaitForSubmission,
          observerPartySets = observerPartySets.getOrElse(List.empty),
        )
      }

    implicit val fibonacciSubmissionConfigDecoder: Decoder[FibonacciSubmissionConfig] =
      Decoder.forProduct4(
        "num_instances",
        "unique_parties",
        "value",
        "wait_for_submission",
      )(FibonacciSubmissionConfig.apply)

    implicit val submissionConfigDecoder: Decoder[SubmissionConfig] =
      Decoder
        .forProduct1[String, String]("type")(identity)
        .flatMap[SubmissionConfig] {
          case "foo" => fooSubmissionConfigDecoder.widen
          case "fibonacci" => fibonacciSubmissionConfigDecoder.widen
          case invalid => Decoder.failedWithMessage(s"Invalid submission type: $invalid")
        }

    val pruningConfigInternal: Decoder[PruningConfig] = (c: HCursor) => {
      for {
        name <- c.downField("name").as[String]
        pruneAllDivulgedContracts <- c.downField("prune_all_divulged_contracts").as[Boolean]
        maxDurationObjective <- c.downField("max_duration_objective").as[FiniteDuration]
      } yield PruningConfig(
        name = name,
        pruneAllDivulgedContracts = pruneAllDivulgedContracts,
        maxDurationObjective = maxDurationObjective,
      )
    }

    implicit val pruningConfig: Decoder[PruningConfig] =
      Decoder
        .forProduct1[String, String]("type")(identity)
        .flatMap[PruningConfig] {
          case "pruning" => pruningConfigInternal
          // Fix: the error message previously said "Invalid submission type",
          // copy-pasted from submissionConfigDecoder.
          case invalid => Decoder.failedWithMessage(s"Invalid pruning type: $invalid")
        }

    implicit val workflowConfigDecoder: Decoder[WorkflowConfig] =
      (c: HCursor) =>
        for {
          submission <- c.downField("submission").as[Option[SubmissionConfig]]
          streams <- c
            .downField("streams")
            .as[Option[List[WorkflowConfig.StreamConfig]]]
            .map(_.getOrElse(Nil))
          unary <- c.downField("unary").as[Option[List[PruningConfig]]]
          // Only the first "unary" entry is honoured; any extra entries are dropped.
        } yield WorkflowConfig(submission, streams, unary.toList.flatten.headOption)
  }
}

View File

@ -1,33 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.infrastructure
import com.daml.ledger.api.benchtool.util.SimpleFileReader
import com.daml.ledger.test.TestDar
import com.google.protobuf.ByteString
import scala.util.{Failure, Success, Try}
import scala.util.control.NonFatal
object TestDars {
  // Only dar resources whose path mentions this infix belong to the benchtool.
  private val TestDarInfix = "benchtool"
  private lazy val resources: List[String] = TestDar.paths.filter(_.contains(TestDarInfix))

  /** Loads every benchtool test dar from the classpath.
    *
    * Either every dar loads successfully, or the first failure is wrapped in a
    * [[TestDarsError]] carrying the original cause.
    */
  def readAll(): Try[List[DarFile]] = {
    val loaded: Try[List[DarFile]] =
      resources.foldLeft[Try[List[DarFile]]](Success(List.empty)) { (accTry, resource) =>
        accTry.flatMap { darsSoFar =>
          SimpleFileReader
            .readResource(resource)
            .map(bytes => DarFile(resource, bytes) :: darsSoFar)
        }
      }
    loaded.recoverWith { case NonFatal(ex) =>
      Failure(TestDarsError(s"Reading test dars failed. Details: ${ex.getLocalizedMessage}", ex))
    }
  }

  /** Raised when any test dar resource cannot be read. */
  case class TestDarsError(message: String, cause: Throwable) extends Exception(message, cause)

  /** A dar resource name together with its raw bytes. */
  case class DarFile(name: String, bytes: ByteString)
}

View File

@ -1,11 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
// Outcome of a single benchmark run: either every objective was met (Ok) or
// at least one service-level objective was violated.
sealed trait BenchmarkResult extends Product with Serializable

object BenchmarkResult {
  final case object Ok extends BenchmarkResult
  final case object ObjectivesViolated extends BenchmarkResult
}

View File

@ -1,120 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.util.TimeUtil
import com.google.protobuf.timestamp.Timestamp
import java.time.{Duration, Instant}
/** Measures how fast a stream's record time advances relative to wall-clock
  * time (record-time progress divided by period length).
  *
  * @param recordTimeFunction extracts the record times carried by one element
  * @param objective optional minimum-speed objective paired with the worst
  *                  (lowest) violating value observed so far
  * @param previousLatest latest record time seen before the current period
  * @param currentPeriodLatest latest record time seen in the current period
  */
final case class ConsumptionSpeedMetric[T](
    recordTimeFunction: T => Seq[Timestamp],
    objective: Option[
      (ConsumptionSpeedMetric.MinConsumptionSpeed, Option[ConsumptionSpeedMetric.Value])
    ],
    previousLatest: Option[Instant] = None,
    currentPeriodLatest: Option[Instant] = None,
) extends Metric[T] {
  import ConsumptionSpeedMetric._

  override type V = Value
  override type Objective = MinConsumptionSpeed

  /** Tracks the first and last record time carried by the element. */
  override def onNext(value: T): ConsumptionSpeedMetric[T] = {
    val recordTimes = recordTimeFunction(value)
    // The very first record time seen becomes the baseline of the first period.
    val newPreviousLatest =
      previousLatest match {
        case None => recordTimes.headOption.map(TimeUtil.timestampToInstant)
        case v => v
      }
    val newCurrentPeriodLatest = recordTimes.lastOption.map(TimeUtil.timestampToInstant)
    this.copy(
      previousLatest = newPreviousLatest,
      currentPeriodLatest = newCurrentPeriodLatest,
    )
  }

  override def periodicValue(periodDuration: Duration): (Metric[T], Value) = {
    val value = Value(Some(periodicSpeed(periodDuration)))
    val updatedMetric = this.copy(
      // The current period's latest record time becomes the next period's baseline.
      previousLatest = if (currentPeriodLatest.isDefined) currentPeriodLatest else previousLatest,
      currentPeriodLatest = None,
      objective = updatedObjectives(value),
    )
    (updatedMetric, value)
  }

  // There is no meaningful total consumption speed, so the final value is empty.
  override def finalValue(totalDuration: Duration): Value =
    Value(None)

  override def violatedPeriodicObjectives: List[(MinConsumptionSpeed, Value)] =
    // Fix: pattern-match on Some instead of the partial isDefined/get pair.
    objective.collect { case (objective, Some(value)) =>
      objective -> value
    }.toList

  override def violatedFinalObjectives(
      totalDuration: Duration
  ): List[(MinConsumptionSpeed, Value)] = Nil

  // Ratio of record-time progress (millis) to the period length (millis);
  // zero until both endpoints of a period are known.
  private def periodicSpeed(periodDuration: Duration): Double =
    (previousLatest, currentPeriodLatest) match {
      case (Some(previous), Some(current)) =>
        (current.toEpochMilli - previous.toEpochMilli).toDouble / periodDuration.toMillis
      case _ =>
        0.0
    }

  private def updatedObjectives(newValue: Value): Option[
    (MinConsumptionSpeed, Option[Value])
  ] =
    objective.map { case (objective, currentMaxValue) =>
      if (objective.isViolatedBy(newValue)) {
        currentMaxValue match {
          case None =>
            objective -> Some(newValue)
          case Some(currentValue) =>
            // Keep the worst (lowest) violating value observed so far.
            objective -> Some(Ordering[Value].min(currentValue, newValue))
        }
      } else {
        objective -> currentMaxValue
      }
    }
}
object ConsumptionSpeedMetric {

  /** Creates a metric that has not yet observed any record times. */
  def empty[T](
      recordTimeFunction: T => Seq[Timestamp],
      objective: Option[MinConsumptionSpeed] = None,
  ): ConsumptionSpeedMetric[T] =
    ConsumptionSpeedMetric(
      recordTimeFunction,
      // No violating value has been recorded for the objective yet.
      objective.map(_ -> None),
    )

  // TODO: remove option
  final case class Value(relativeSpeed: Option[Double]) extends MetricValue

  object Value {
    // An absent speed sorts below any present speed; present speeds compare numerically.
    implicit val ordering: Ordering[Value] = (left: Value, right: Value) => {
      (left.relativeSpeed, right.relativeSpeed) match {
        case (Some(l), Some(r)) =>
          if (l < r) -1
          else if (r < l) 1
          else 0
        case (Some(_), None) => 1
        case (None, Some(_)) => -1
        case (None, None) => 0
      }
    }
  }

  /** Objective: the periodic consumption speed must never fall below `minSpeed`. */
  final case class MinConsumptionSpeed(minSpeed: Double) extends ServiceLevelObjective[Value] {
    private val threshold = Value(Some(minSpeed))
    override def isViolatedBy(metricValue: Value): Boolean =
      Ordering[Value].lt(metricValue, threshold)
  }
}

View File

@ -1,106 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.time.Duration
/** Tracks item throughput (items per second), both per reporting period and
  * over the whole run, against optional minimum/maximum rate objectives.
  *
  * @param countingFunction number of items contributed by one stream element
  * @param periodicObjectives objectives checked every period, each paired with
  *                           the worst violating value observed so far
  * @param finalObjectives objectives checked once against the total rate
  * @param counter total items seen so far
  * @param lastCount value of `counter` at the end of the previous period
  */
final case class CountRateMetric[T](
    countingFunction: T => Int,
    periodicObjectives: List[
      (CountRateMetric.RateObjective, Option[CountRateMetric.Value])
    ],
    finalObjectives: List[CountRateMetric.RateObjective],
    counter: Int = 0,
    lastCount: Int = 0,
) extends Metric[T] {
  import CountRateMetric._

  override type V = Value
  override type Objective = RateObjective

  override def onNext(value: T): CountRateMetric[T] =
    this.copy(counter = counter + countingFunction(value))

  override def periodicValue(periodDuration: Duration): (Metric[T], Value) = {
    val value = Value(periodicRate(periodDuration))
    val updatedMetric = this.copy(
      periodicObjectives = updatedPeriodicObjectives(value),
      lastCount = counter,
    )
    (updatedMetric, value)
  }

  override def finalValue(totalDuration: Duration): Value =
    Value(ratePerSecond = totalRate(totalDuration))

  override def violatedPeriodicObjectives: List[(RateObjective, Value)] =
    periodicObjectives.collect { case (objective, Some(value)) =>
      objective -> value
    }

  override def violatedFinalObjectives(
      totalDuration: Duration
  ): List[(RateObjective, Value)] = {
    // Fix: compute the final value once instead of twice per violating objective.
    val value = finalValue(totalDuration)
    finalObjectives.collect {
      case objective if objective.isViolatedBy(value) =>
        (objective, value)
    }
  }

  // Items seen during this period, scaled to items per second.
  private def periodicRate(periodDuration: Duration): Double =
    (counter - lastCount) * 1000.0 / periodDuration.toMillis

  // Total items scaled to items per second over the whole run.
  private def totalRate(totalDuration: Duration): Double =
    counter / totalDuration.toMillis.toDouble * 1000.0

  private def updatedPeriodicObjectives(
      newValue: Value
  ): List[(RateObjective, Option[Value])] = {
    periodicObjectives.map { case (objective, currentMinValue) =>
      if (objective.isViolatedBy(newValue)) {
        // Keep the worst (lowest) violating value observed so far.
        currentMinValue match {
          case None => objective -> Some(newValue)
          case Some(currentValue) => objective -> Some(Ordering[Value].min(currentValue, newValue))
        }
      } else {
        objective -> currentMinValue
      }
    }
  }
}
object CountRateMetric {

  /** An items-per-second rate observation. */
  final case class Value(ratePerSecond: Double) extends MetricValue

  object Value {
    implicit val ordering: Ordering[Value] =
      Ordering.fromLessThan(_.ratePerSecond < _.ratePerSecond)
  }

  /** A rate-based service-level objective (minimum or maximum rate). */
  abstract class RateObjective extends ServiceLevelObjective[Value] with Product with Serializable

  object RateObjective {

    /** Violated when the observed rate drops below the allowed minimum. */
    final case class MinRate(minAllowedRatePerSecond: Double) extends RateObjective {
      override def isViolatedBy(metricValue: CountRateMetric.Value): Boolean =
        Ordering[CountRateMetric.Value].lt(metricValue, v)
      private val v = CountRateMetric.Value(minAllowedRatePerSecond)
    }

    /** Violated when the observed rate exceeds the allowed maximum.
      * NOTE(review): the parameter is named `minAllowedRatePerSecond` but acts
      * as a maximum; renaming would break named-argument callers, so it is
      * only flagged here.
      */
    final case class MaxRate(minAllowedRatePerSecond: Double) extends RateObjective {
      override def isViolatedBy(metricValue: CountRateMetric.Value): Boolean =
        Ordering[CountRateMetric.Value].gt(metricValue, v)
      private val v = CountRateMetric.Value(minAllowedRatePerSecond)
    }
  }

  /** Creates a metric with zero counts and no violations recorded yet. */
  def empty[T](
      countingFunction: T => Int,
      periodicObjectives: List[RateObjective],
      finalObjectives: List[RateObjective],
  ): CountRateMetric[T] = CountRateMetric[T](
    countingFunction,
    periodicObjectives.map(obj => obj -> None),
    finalObjectives = finalObjectives,
  )
}

View File

@ -1,116 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.util.TimeUtil
import com.google.protobuf.timestamp.Timestamp
import java.time.{Clock, Duration}
/** Tracks the mean delay between each element's record time and the wall
  * clock, checked per period against an optional maximum-delay objective.
  *
  * @param recordTimeFunction extracts the record times carried by one element
  * @param clock wall clock used to measure delays
  * @param objective optional max-delay objective paired with the worst
  *                  (highest) violating value observed so far
  * @param delaysInCurrentInterval delays collected in the current period
  */
final case class DelayMetric[T](
    recordTimeFunction: T => Seq[Timestamp],
    clock: Clock,
    objective: Option[(DelayMetric.MaxDelay, Option[DelayMetric.Value])],
    delaysInCurrentInterval: List[Duration] = List.empty,
) extends Metric[T] {
  import DelayMetric._

  override type V = Value
  override type Objective = MaxDelay

  /** Records the delay of every record time carried by the element. */
  override def onNext(value: T): DelayMetric[T] = {
    val now = clock.instant()
    val newDelays: List[Duration] = recordTimeFunction(value).toList
      .map(TimeUtil.durationBetween(_, now))
    this.copy(delaysInCurrentInterval = delaysInCurrentInterval ::: newDelays)
  }

  override def periodicValue(periodDuration: Duration): (Metric[T], Value) = {
    val value = Value(periodicMeanDelay.map(_.getSeconds))
    val updatedMetric = this.copy(
      delaysInCurrentInterval = List.empty,
      objective = updatedObjective(value),
    )
    (updatedMetric, value)
  }

  // There is no meaningful total mean delay, so the final value is empty.
  override def finalValue(totalDuration: Duration): Value =
    Value(None)

  override def violatedPeriodicObjectives: List[(MaxDelay, Value)] =
    // Fix: pattern-match on Some instead of the partial isDefined/get pair.
    objective.collect { case (objective, Some(value)) =>
      objective -> value
    }.toList

  override def violatedFinalObjectives(
      totalDuration: Duration
  ): List[(MaxDelay, Value)] = Nil

  private def updatedObjective(
      newValue: Value
  ): Option[(MaxDelay, Option[DelayMetric.Value])] =
    objective.map { case (objective, currentViolatingValue) =>
      // verify if the new value violates objective's requirements
      if (objective.isViolatedBy(newValue)) {
        currentViolatingValue match {
          case None =>
            // if the new value violates objective's requirements and there is no other violating value,
            // record the new value
            objective -> Some(newValue)
          case Some(currentValue) =>
            // if the new value violates objective's requirements and there is already a value that violates
            // requirements, record the maximum value of the two
            objective -> Some(Ordering[V].max(currentValue, newValue))
        }
      } else {
        objective -> currentViolatingValue
      }
    }

  // Arithmetic mean of this period's delays, or None when nothing was seen.
  private def periodicMeanDelay: Option[Duration] =
    if (delaysInCurrentInterval.nonEmpty)
      Some(
        delaysInCurrentInterval
          .reduceLeft(_.plus(_))
          .dividedBy(delaysInCurrentInterval.length.toLong)
      )
    else None
}
object DelayMetric {

  /** Creates a delay metric with an empty current interval. */
  def empty[T](
      recordTimeFunction: T => Seq[Timestamp],
      clock: Clock,
      objective: Option[MaxDelay] = None,
  ): DelayMetric[T] =
    DelayMetric(
      recordTimeFunction = recordTimeFunction,
      clock = clock,
      // No violating value has been recorded for the objective yet.
      objective = objective.map(_ -> None),
    )

  /** Mean delay (in seconds) over one period, when any items were seen. */
  final case class Value(meanDelaySeconds: Option[Long]) extends MetricValue

  object Value {
    // An absent mean delay sorts below any present one; present delays compare numerically.
    implicit val valueOrdering: Ordering[Value] = (left: Value, right: Value) => {
      (left.meanDelaySeconds, right.meanDelaySeconds) match {
        case (Some(l), Some(r)) =>
          if (l < r) -1
          else if (r < l) 1
          else 0
        case (Some(_), None) => 1
        case (None, Some(_)) => -1
        case (None, None) => 0
      }
    }
  }

  /** Objective: the periodic mean delay must not exceed `maxDelaySeconds`. */
  final case class MaxDelay(maxDelaySeconds: Long)
      extends ServiceLevelObjective[DelayMetric.Value] {
    override def isViolatedBy(metricValue: DelayMetric.Value): Boolean =
      metricValue.meanDelaySeconds.exists(_ > maxDelaySeconds)
  }
}

View File

@ -1,103 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.time.Clock
import com.daml.ledger.api.benchtool.util.TimeUtil
import com.daml.metrics.api.MetricHandle.{Counter, Gauge, Histogram, MetricsFactory}
import com.daml.metrics.api.{MetricName, MetricsContext}
import com.google.protobuf.timestamp.Timestamp
import scala.annotation.nowarn
/** Publishes per-element statistics of a benchmark stream through the metrics
  * factory handles (count, bytes read, delay, latest record time).
  */
final class ExposedMetrics[T](
    counterMetric: ExposedMetrics.CounterMetric[T],
    bytesProcessedMetric: ExposedMetrics.BytesProcessedMetric[T],
    delayMetric: Option[ExposedMetrics.DelayMetric[T]],
    latestRecordTimeMetric: Option[ExposedMetrics.LatestRecordTimeMetric[T]],
    clock: Clock,
) {

  /** Records one stream element in every configured metric. */
  def onNext(elem: T): Unit = {
    counterMetric.counter.inc(counterMetric.countingFunction(elem))(MetricsContext.Empty)
    bytesProcessedMetric.bytesProcessed.inc(bytesProcessedMetric.sizingFunction(elem))(
      MetricsContext.Empty
    )
    // One delay observation per record time carried by the element.
    delayMetric.foreach { metric =>
      val now = clock.instant()
      for (recordTime <- metric.recordTimeFunction(elem)) {
        val delay = TimeUtil.durationBetween(recordTime, now)
        metric.delays.update(delay.getSeconds)
      }
    }
    // Only the element's last record time is published as "latest".
    latestRecordTimeMetric.foreach { metric =>
      val lastRecordTime = metric.recordTimeFunction(elem).lastOption
      lastRecordTime.foreach(recordTime => metric.latestRecordTime.updateValue(recordTime.seconds))
    }
  }
}
object ExposedMetrics {
  // All benchtool metrics are registered under this common name prefix.
  private val Prefix: MetricName = MetricName.Daml :+ "bench_tool"

  /** Item counter plus the function extracting the item count from an element. */
  case class CounterMetric[T](counter: Counter, countingFunction: T => Long)

  /** Bytes counter plus the function extracting the byte size from an element. */
  case class BytesProcessedMetric[T](bytesProcessed: Counter, sizingFunction: T => Long)

  /** Delay histogram plus the function extracting record times from an element. */
  case class DelayMetric[T](delays: Histogram, recordTimeFunction: T => Seq[Timestamp])

  /** Latest-record-time gauge plus the function extracting record times. */
  case class LatestRecordTimeMetric[T](
      latestRecordTime: Gauge[Long],
      recordTimeFunction: T => Seq[Timestamp],
  )

  /** Registers all metrics for `streamName` with the factory and wires them
    * into an [[ExposedMetrics]] instance. Delay and latest-record-time metrics
    * are only created when a record-time extractor is supplied.
    */
  def apply[T](
      streamName: String,
      @nowarn factory: MetricsFactory,
      countingFunction: T => Long,
      sizingFunction: T => Long,
      recordTimeFunction: Option[T => Seq[Timestamp]],
      clock: Clock = Clock.systemUTC(),
  ): ExposedMetrics[T] = {
    val counterMetric = CounterMetric[T](
      counter = factory.counter(
        Prefix :+ "count" :+ streamName
      ),
      countingFunction = countingFunction,
    )
    val bytesProcessedMetric = BytesProcessedMetric[T](
      bytesProcessed = factory.counter(
        Prefix :+ "bytes_read" :+ streamName
      ),
      sizingFunction = sizingFunction,
    )
    val delayMetric = recordTimeFunction.map { f =>
      DelayMetric[T](
        delays = factory.histogram(
          Prefix :+ "delay" :+ streamName
        ),
        recordTimeFunction = f,
      )
    }
    val latestRecordTimeMetric = recordTimeFunction.map { f =>
      LatestRecordTimeMetric[T](
        latestRecordTime = factory.gauge(
          Prefix :+ "latest_record_time" :+ streamName,
          0L,
        )(MetricsContext.Empty),
        recordTimeFunction = f,
      )
    }
    new ExposedMetrics[T](
      counterMetric = counterMetric,
      bytesProcessedMetric = bytesProcessedMetric,
      delayMetric = delayMetric,
      latestRecordTimeMetric = latestRecordTimeMetric,
      clock = clock,
    )
  }
}

View File

@ -1,53 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.metrics.LatencyMetric.{LatencyNanos, MaxLatency, Value}
import java.time.Duration
/** Tracks the running average command latency in nanoseconds.
  *
  * Immutable: each observation produces an updated copy carrying the new sums.
  */
case class LatencyMetric(totalNanos: LatencyNanos, numberObservations: Int, maxLatency: MaxLatency)
    extends Metric[LatencyNanos] {
  override type V = LatencyMetric.Value
  override type Objective = MaxLatency

  /** Folds one latency observation into the running totals. */
  override def onNext(value: LatencyNanos): Metric[LatencyNanos] =
    copy(totalNanos = totalNanos + value, numberObservations = numberObservations + 1)

  /** The periodic value is simply the average-so-far; state is unchanged. */
  override def periodicValue(periodDuration: Duration): (Metric[LatencyNanos], Value) =
    (this, currentAverage)

  override def finalValue(totalDuration: Duration): Value = currentAverage

  /** Reports the max-latency objective if the final average exceeds it. */
  override def violatedFinalObjectives(
      totalDuration: Duration
  ): List[(MaxLatency, Value)] = {
    val average = finalValue(totalDuration)
    if (maxLatency.isViolatedBy(average)) List(maxLatency -> average) else Nil
  }

  // Integer division is intentional: sub-nanosecond precision is irrelevant here.
  private def currentAverage: Value =
    numberObservations match {
      case 0 => Value(0L)
      case n => Value(totalNanos / n)
    }
}
object LatencyMetric {
  /** Latency measured in nanoseconds. */
  type LatencyNanos = Long

  /** Average latency observed so far, in nanoseconds. */
  case class Value(latencyNanos: LatencyNanos) extends MetricValue

  /** A zero-observation metric with the given objective, converted from millis to nanos. */
  def empty(maxLatencyObjectiveMillis: Long): LatencyMetric = {
    val maxNanos = maxLatencyObjectiveMillis * 1000000L
    LatencyMetric(totalNanos = 0, numberObservations = 0, maxLatency = MaxLatency(maxNanos))
  }

  /** Objective: the final average latency must not exceed `maxLatency` nanoseconds. */
  final case class MaxLatency(maxLatency: LatencyNanos) extends ServiceLevelObjective[Value] {
    override def isViolatedBy(metricValue: Value): Boolean =
      metricValue.latencyNanos > maxLatency

    /** The objective expressed in milliseconds, for reporting. */
    def millis: Double = maxLatency / 1000000d
  }
}

View File

@ -1,32 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import org.slf4j.Logger
import scala.concurrent.Future
/** Stream observer that forwards every element to a [[MetricsManager]] and
  * cancels the stream once an optional item budget is exhausted.
  *
  * @param itemCountingFunction how many logical items one stream element contains
  * @param maxItemCount         when set, the stream is cancelled after at least
  *                             this many items have been observed
  */
class MeteredStreamObserver[T](
    val streamName: String,
    logger: Logger,
    manager: MetricsManager[T],
    itemCountingFunction: T => Long,
    maxItemCount: Option[Long],
) extends ObserverWithResult[T, BenchmarkResult](logger) {

  // Items seen so far; gRPC invokes onNext serially, so no synchronization needed.
  private var itemsCount = 0L

  override def onNext(value: T): Unit = {
    itemsCount += itemCountingFunction(value)
    manager.sendNewValue(value)
    super.onNext(value)
    // Idiomatic Option check instead of isDefined + .get.
    if (maxItemCount.exists(itemsCount >= _))
      cancel()
  }

  /** The benchmark outcome computed by the metrics manager. */
  override def completeWith(): Future[BenchmarkResult] =
    manager.result()
}

View File

@ -1,42 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.time.Duration
/** A metric computed incrementally over the elements of a single benchmarked stream.
  *
  * Implementations return updated copies from `onNext`/`periodicValue`; the
  * metrics collector threads these through its actor state.
  */
trait Metric[Elem] {
  /** The concrete value type this metric reports. */
  type V <: MetricValue
  /** The service-level-objective type this metric can be checked against. */
  type Objective <: ServiceLevelObjective[V]

  /** @return an updated version of itself
    */
  def onNext(value: Elem): Metric[Elem]

  /** @return an updated version of itself and the value observed in this period
    *
    * NOTE: Durations of subsequent periods are not guaranteed to be exactly the same.
    */
  def periodicValue(periodDuration: Duration): (Metric[Elem], V)

  /** @return the value aggregated over the entire run of the stream. */
  def finalValue(totalDuration: Duration): V

  /** @return a list of objective violations, where each element is a pair of
    * a violated objective and the periodic value that violates it the most.
    */
  def violatedPeriodicObjectives: List[(Objective, V)] = Nil

  /** @return a list of objective violations, where each element is a pair of
    * a violated objective and the final value that violates it.
    */
  def violatedFinalObjectives(totalDuration: Duration): List[(Objective, V)]

  /** Human-readable metric name used in reports; defaults to the class name. */
  def name: String = getClass.getSimpleName
}
object Metric {
  /** Formats a metric value with two decimal places for human-readable reports. */
  def rounded(value: Double): String = f"$value%.2f"
}

View File

@ -1,53 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.util.concurrent.TimeUnit
import com.daml.ledger.resources.{Resource, ResourceContext, ResourceOwner}
import com.daml.metrics.api.opentelemetry.Slf4jMetricExporter
import com.daml.metrics.api.reporters.MetricsReporter
import io.opentelemetry.api.metrics.MeterProvider
import io.opentelemetry.exporter.prometheus.PrometheusHttpServer
import io.opentelemetry.sdk.metrics.SdkMeterProvider
import io.opentelemetry.sdk.metrics.`export`.PeriodicMetricReader
import org.slf4j.Logger
import scala.concurrent.duration.Duration
/** Resource owner for an OpenTelemetry meter provider configured from a
  * [[MetricsReporter]]: always logs metrics via SLF4J, and additionally serves
  * them over HTTP when the Prometheus reporter is selected.
  */
class MetricRegistryOwner(
    reporter: MetricsReporter,
    reportingInterval: Duration,
    logger: Logger,
) extends ResourceOwner[MeterProvider] {

  override def acquire()(implicit
      context: ResourceContext
  ): Resource[MeterProvider] =
    ResourceOwner.forCloseable(() => metricOwner).acquire()

  // Builds the SdkMeterProvider with the readers implied by the reporter.
  private def metricOwner = {
    // Periodic reader that exports metric values to the given logger.
    val slf4jReader = PeriodicMetricReader
      .builder(new Slf4jMetricExporter(logger))
      .setInterval(reportingInterval.toMillis, TimeUnit.MILLISECONDS)
      .newMetricReaderFactory()
    val builder = SdkMeterProvider.builder()
    reporter match {
      case MetricsReporter.Console =>
        builder.registerMetricReader(slf4jReader)
      case MetricsReporter.Prometheus(address) =>
        val prometheusReader = PrometheusHttpServer
          .builder()
          .setHost(address.getHostString)
          .setPort(address.getPort)
          .newMetricReaderFactory()
        builder.registerMetricReader(slf4jReader).registerMetricReader(prometheusReader)
      case _ =>
        throw new IllegalArgumentException(s"Metric reporter $reporter not supported.")
    }
    builder.build()
  }
}

View File

@ -1,6 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
/** Marker trait for the value types reported by [[Metric]] implementations. */
trait MetricValue

View File

@ -1,102 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import akka.actor.typed.scaladsl.Behaviors
import akka.actor.typed.{ActorRef, Behavior}
import com.daml.ledger.api.benchtool.util.TimeUtil
import java.time.{Clock, Duration, Instant}
/** Factory and message protocol for the metrics-collecting actor. */
object MetricsCollector {

  /** Messages accepted by the collector actor. */
  sealed trait Message
  object Message {
    /** A new stream element to fold into every configured metric. */
    final case class NewValue[T](value: T) extends Message
    /** Asks for an interim report; may be answered with [[Response.ReportNotReady]]. */
    final case class PeriodicReportRequest(replyTo: ActorRef[Response.PeriodicReportResponse])
        extends Message
    /** Asks for the final report; handling this stops the actor. */
    final case class FinalReportRequest(replyTo: ActorRef[Response.FinalReport]) extends Message
  }

  /** Responses emitted by the collector actor. */
  sealed trait Response
  object Response {
    sealed trait PeriodicReportResponse extends Response
    /** One periodic value per configured metric. */
    final case class PeriodicReport(values: List[MetricValue]) extends PeriodicReportResponse
    /** Sent when too little time has passed since the previous periodic report. */
    final case object ReportNotReady extends PeriodicReportResponse
    /** Final value and objective violations for a single metric. */
    final case class MetricFinalReportData(
        name: String,
        value: MetricValue,
        violatedObjectives: List[(ServiceLevelObjective[_], MetricValue)],
    )
    final case class FinalReport(totalDuration: Duration, metricsData: List[MetricFinalReportData])
        extends Response
  }

  /** Creates the initial behavior of a collector for the given metrics.
    *
    * @param metrics        metrics to update for every observed element
    * @param exposedMetrics optional externally exported metrics updated in lockstep
    */
  def apply[T](
      metrics: List[Metric[T]],
      exposedMetrics: Option[ExposedMetrics[T]] = None,
  ): Behavior[Message] = {
    val clock = Clock.systemUTC()
    val startTime: Instant = clock.instant()
    // Throttle so two near-simultaneous report requests don't measure a
    // meaninglessly short period.
    val minimumTimePeriodBetweenSubsequentReports: Duration = Duration.ofMillis(100)
    new MetricsCollector[T](exposedMetrics, minimumTimePeriodBetweenSubsequentReports, clock)
      .handlingMessages(metrics, startTime, startTime)
  }
}
/** Actor state machine that folds stream elements into metrics and serves
  * periodic and final reports.
  *
  * State (the current metric values and timestamps) is carried through the
  * recursive `handlingMessages` behavior rather than mutable fields.
  */
class MetricsCollector[T](
    exposedMetrics: Option[ExposedMetrics[T]],
    minimumTimePeriodBetweenSubsequentReports: Duration = Duration.ofMillis(100),
    clock: Clock,
) {
  import MetricsCollector._
  import MetricsCollector.Message._
  import MetricsCollector.Response._

  @scala.annotation.nowarn("msg=.*is unchecked since it is eliminated by erasure")
  def handlingMessages(
      metrics: List[Metric[T]],
      lastPeriodicCheck: Instant,
      startTime: Instant,
  ): Behavior[Message] = {
    Behaviors.receive { case (_, message) =>
      message match {
        case newValue: NewValue[T] =>
          // Update the exported metrics and recurse with updated in-memory metrics.
          exposedMetrics.foreach(_.onNext(newValue.value))
          handlingMessages(metrics.map(_.onNext(newValue.value)), lastPeriodicCheck, startTime)
        case request: PeriodicReportRequest =>
          val currentTime = clock.instant()
          val periodSinceLastReport: Duration =
            TimeUtil.durationBetween(lastPeriodicCheck, currentTime)
          // Only report when the elapsed period is long enough to be meaningful.
          if (
            TimeUtil.isAtLeast(periodSinceLastReport, minimumTimePeriodBetweenSubsequentReports)
          ) {
            val (newMetrics, values) = metrics
              .map(_.periodicValue(periodSinceLastReport))
              .unzip
            request.replyTo ! Response.PeriodicReport(values)
            handlingMessages(newMetrics, currentTime, startTime)
          } else {
            request.replyTo ! Response.ReportNotReady
            Behaviors.same
          }
        case request: FinalReportRequest =>
          // Compute the final report over the whole runtime and stop the actor.
          val duration = TimeUtil.durationBetween(startTime, clock.instant())
          val data: List[MetricFinalReportData] =
            metrics.map { metric =>
              MetricFinalReportData(
                name = metric.name,
                value = metric.finalValue(duration),
                violatedObjectives =
                  metric.violatedPeriodicObjectives ::: metric.violatedFinalObjectives(duration),
              )
            }
          request.replyTo ! FinalReport(duration, data)
          Behaviors.stopped
      }
    }
  }
}

View File

@ -1,119 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import akka.actor.{Cancellable, CoordinatedShutdown}
import akka.actor.typed.scaladsl.AskPattern._
import akka.actor.typed.{ActorRef, ActorSystem, Props, SpawnProtocol}
import akka.util.Timeout
import com.daml.ledger.api.benchtool.metrics.MetricsCollector.Response
import com.daml.ledger.api.benchtool.util.ReportFormatter
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
/** Entry point for feeding stream elements into metrics and obtaining the benchmark outcome. */
trait MetricsManager[T] {
  /** Fire-and-forget: folds the element into all managed metrics. */
  def sendNewValue(value: T): Unit
  /** Produces the final benchmark result. */
  def result(): Future[BenchmarkResult]
}
/** Default [[MetricsManager]] backed by a [[MetricsCollector]] actor.
  *
  * Construction has side effects: it registers a coordinated-shutdown task
  * that forces a final report, and starts the periodic-report timer.
  * NOTE(review): val initialization order matters here — `periodicRequest` and
  * `logger` are initialized below their uses in `result()`, which is safe only
  * because `result()` is never invoked during construction.
  */
case class MetricsManagerImpl[T](
    collector: ActorRef[MetricsCollector.Message],
    logInterval: FiniteDuration,
    observedMetric: String,
)(implicit
    system: ActorSystem[SpawnProtocol.Command]
) extends MetricsManager[T] {

  def sendNewValue(value: T): Unit =
    collector ! MetricsCollector.Message.NewValue(value)

  /** Cancels periodic reporting, asks the collector for the final report,
    * logs it, and maps it to a benchmark outcome (Ok / ObjectivesViolated).
    */
  def result(): Future[BenchmarkResult] = {
    logger.debug(s"Requesting result of stream: $observedMetric")
    periodicRequest.cancel()
    implicit val timeout: Timeout = Timeout(3.seconds)
    collector
      .ask(MetricsCollector.Message.FinalReportRequest)
      .map { response: MetricsCollector.Response.FinalReport =>
        logger.info(
          ReportFormatter.formatFinalReport(
            streamName = observedMetric,
            finalReport = response,
          )
        )
        val atLeastOneObjectiveViolated = response.metricsData.exists(_.violatedObjectives.nonEmpty)
        if (atLeastOneObjectiveViolated) BenchmarkResult.ObjectivesViolated
        else BenchmarkResult.Ok
      }(system.executionContext)
  }

  // Ensure the final report is produced even when the actor system shuts down early.
  CoordinatedShutdown(system).addTask(
    phase = CoordinatedShutdown.PhaseBeforeServiceUnbind,
    taskName = "report-results",
  ) { () =>
    logger.debug(s"Shutting down infrastructure for stream: $observedMetric")
    result().map(_ => akka.Done)(system.executionContext)
  }

  // Periodically asks the collector for an interim report and logs it;
  // ReportNotReady responses are silently dropped.
  private val periodicRequest: Cancellable =
    system.scheduler.scheduleWithFixedDelay(logInterval, logInterval)(() => {
      implicit val timeout: Timeout = Timeout(logInterval)
      collector
        .ask(MetricsCollector.Message.PeriodicReportRequest)
        .collect {
          case Response.ReportNotReady => ()
          case response: Response.PeriodicReport =>
            logger.info(
              ReportFormatter.formatPeriodicReport(
                streamName = observedMetric,
                periodicReport = response,
              )
            )
        }(system.executionContext)
      ()
    })(system.executionContext)

  private val logger = LoggerFactory.getLogger(getClass)
}
object MetricsManager {

  /** Spawns a [[MetricsCollector]] actor for the given metrics and wraps it
    * in a [[MetricsManagerImpl]].
    */
  def create[StreamElem](
      observedMetric: String,
      logInterval: FiniteDuration,
      metrics: List[Metric[StreamElem]],
      exposedMetrics: Option[ExposedMetrics[StreamElem]],
  )(implicit
      system: ActorSystem[SpawnProtocol.Command],
      ec: ExecutionContext,
  ): Future[MetricsManager[StreamElem]] = {
    implicit val timeout: Timeout = Timeout(3.seconds)
    val behavior = MetricsCollector(
      metrics = metrics,
      exposedMetrics = exposedMetrics,
    )
    val spawned: Future[ActorRef[MetricsCollector.Message]] =
      system.ask(
        SpawnProtocol.Spawn(
          behavior = behavior,
          name = s"$observedMetric-collector",
          props = Props.empty,
          _,
        )
      )
    for (collector <- spawned)
      yield MetricsManagerImpl[StreamElem](
        collector = collector,
        logInterval = logInterval,
        observedMetric = observedMetric,
      )
  }

  /** Manager that discards all values and always reports a successful benchmark. */
  case class NoOpMetricsManager[T]() extends MetricsManager[T] {
    override def sendNewValue(value: T): Unit = {
      val _ = value
    }

    override def result(): Future[BenchmarkResult] = Future.successful(BenchmarkResult.Ok)
  }
}

View File

@ -1,208 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.time.{Clock, Duration}
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig._
import com.daml.ledger.api.benchtool.metrics.metrics.TotalRuntimeMetric
import com.daml.ledger.api.benchtool.metrics.metrics.TotalRuntimeMetric.MaxDurationObjective
import com.daml.ledger.api.v1.active_contracts_service.GetActiveContractsResponse
import com.daml.ledger.api.v1.command_completion_service.CompletionStreamResponse
import com.daml.ledger.api.v1.transaction_service.{
GetTransactionTreesResponse,
GetTransactionsResponse,
}
import com.daml.metrics.api.MetricHandle.MetricsFactory
import com.google.protobuf.timestamp.Timestamp
import scala.annotation.nowarn
import scala.concurrent.duration.FiniteDuration
/** Catalogue of the metric sets used for each Ledger API stream type
  * (transactions, transaction trees, active contracts, completions).
  */
object MetricsSet {

  /** In-memory metrics for the flat transaction stream (rates, totals,
    * consumption speed, delay, size, optional max-runtime).
    */
  def transactionMetrics(
      configO: Option[TransactionObjectives]
  ): List[Metric[GetTransactionsResponse]] =
    transactionMetrics[GetTransactionsResponse](
      countingFunction = response => countFlatTransactionsEvents(response).toInt,
      sizingFunction = _.serializedSize.toLong,
      // Only transactions carrying an effectiveAt contribute a record time.
      recordTimeFunction = _.transactions.collect {
        case t if t.effectiveAt.isDefined => t.getEffectiveAt
      },
      configO = configO,
    )

  /** Externally exported metrics for the flat transaction stream. */
  def transactionExposedMetrics(
      streamName: String,
      @nowarn metricsFactory: MetricsFactory,
  ): ExposedMetrics[GetTransactionsResponse] =
    ExposedMetrics[GetTransactionsResponse](
      streamName = streamName,
      factory = metricsFactory,
      countingFunction = countFlatTransactionsEvents,
      sizingFunction = _.serializedSize.toLong,
      recordTimeFunction = Some(_.transactions.collect {
        case t if t.effectiveAt.isDefined => t.getEffectiveAt
      }),
    )

  /** In-memory metrics for the transaction tree stream. */
  def transactionTreesMetrics(
      configO: Option[TransactionObjectives]
  ): List[Metric[GetTransactionTreesResponse]] =
    transactionMetrics[GetTransactionTreesResponse](
      countingFunction = response => countTreeTransactionsEvents(response).toInt,
      sizingFunction = _.serializedSize.toLong,
      recordTimeFunction = _.transactions.collect {
        case t if t.effectiveAt.isDefined => t.getEffectiveAt
      },
      configO = configO,
    )

  /** Externally exported metrics for the transaction tree stream. */
  def transactionTreesExposedMetrics(
      streamName: String,
      @nowarn metricsFactory: MetricsFactory,
  ): ExposedMetrics[GetTransactionTreesResponse] =
    ExposedMetrics[GetTransactionTreesResponse](
      streamName = streamName,
      factory = metricsFactory,
      countingFunction = countTreeTransactionsEvents,
      sizingFunction = _.serializedSize.toLong,
      recordTimeFunction = Some(_.transactions.collect {
        case t if t.effectiveAt.isDefined => t.getEffectiveAt
      }),
    )

  /** In-memory metrics for the active contracts stream.
    * No record-time-based metrics: ACS responses carry no record times.
    */
  def activeContractsMetrics(
      configO: Option[AcsAndCompletionsObjectives]
  ): List[Metric[GetActiveContractsResponse]] =
    List[Metric[GetActiveContractsResponse]](
      CountRateMetric.empty[GetActiveContractsResponse](
        countingFunction = _.activeContracts.length,
        periodicObjectives = Nil,
        finalObjectives = List(
          configO.flatMap(_.minItemRate.map(CountRateMetric.RateObjective.MinRate)),
          configO.flatMap(_.maxItemRate.map(CountRateMetric.RateObjective.MaxRate)),
        ).flatten,
      ),
      TotalCountMetric.empty[GetActiveContractsResponse](
        countingFunction = countActiveContracts
      ),
      SizeMetric.empty[GetActiveContractsResponse](
        sizingFunction = _.serializedSize.toLong
      ),
    ) ++ optionalMaxDurationMetrics(configO)

  /** Externally exported metrics for the active contracts stream. */
  @nowarn("cat=deprecation")
  def activeContractsExposedMetrics(
      streamName: String,
      metricsFactory: MetricsFactory,
  ): ExposedMetrics[GetActiveContractsResponse] =
    ExposedMetrics[GetActiveContractsResponse](
      streamName = streamName,
      factory = metricsFactory,
      countingFunction = response => countActiveContracts(response).toLong,
      sizingFunction = _.serializedSize.toLong,
      recordTimeFunction = None,
    )

  /** In-memory metrics for the command completion stream. */
  def completionsMetrics(
      configO: Option[AcsAndCompletionsObjectives]
  ): List[Metric[CompletionStreamResponse]] =
    List[Metric[CompletionStreamResponse]](
      CountRateMetric.empty(
        countingFunction = _.completions.length,
        periodicObjectives = Nil,
        finalObjectives = List(
          configO.flatMap(_.minItemRate.map(CountRateMetric.RateObjective.MinRate)),
          configO.flatMap(_.maxItemRate.map(CountRateMetric.RateObjective.MaxRate)),
        ).flatten,
      ),
      TotalCountMetric.empty(
        countingFunction = countCompletions
      ),
      SizeMetric.empty(
        sizingFunction = _.serializedSize.toLong
      ),
    ) ++ optionalMaxDurationMetrics(configO)

  /** Externally exported metrics for the command completion stream. */
  @nowarn("cat=deprecation")
  def completionsExposedMetrics(
      streamName: String,
      metricsFactory: MetricsFactory,
  ): ExposedMetrics[CompletionStreamResponse] =
    ExposedMetrics[CompletionStreamResponse](
      streamName = streamName,
      factory = metricsFactory,
      countingFunction = response => countCompletions(response).toLong,
      sizingFunction = _.serializedSize.toLong,
      recordTimeFunction = None,
    )

  // Shared metric set for both transaction stream flavors; objectives are
  // attached only when the corresponding config values are present.
  private def transactionMetrics[T](
      countingFunction: T => Int,
      sizingFunction: T => Long,
      recordTimeFunction: T => Seq[Timestamp],
      configO: Option[TransactionObjectives],
  ): List[Metric[T]] = {
    List[Metric[T]](
      CountRateMetric.empty[T](
        countingFunction = countingFunction,
        periodicObjectives = Nil,
        finalObjectives = List(
          configO.flatMap(_.minItemRate.map(CountRateMetric.RateObjective.MinRate)),
          configO.flatMap(_.maxItemRate.map(CountRateMetric.RateObjective.MaxRate)),
        ).flatten,
      ),
      TotalCountMetric.empty[T](
        countingFunction = countingFunction
      ),
      ConsumptionSpeedMetric.empty[T](
        recordTimeFunction = recordTimeFunction,
        objective =
          configO.flatMap(_.minConsumptionSpeed.map(ConsumptionSpeedMetric.MinConsumptionSpeed)),
      ),
      DelayMetric.empty[T](
        recordTimeFunction = recordTimeFunction,
        clock = Clock.systemUTC(),
        objective = configO.flatMap(_.maxDelaySeconds.map(DelayMetric.MaxDelay)),
      ),
      SizeMetric.empty[T](
        sizingFunction = sizingFunction
      ),
    ) ++ optionalMaxDurationMetrics(configO)
  }

  /** Number of active contracts in one ACS response. */
  def countActiveContracts(response: GetActiveContractsResponse): Int =
    response.activeContracts.length

  /** Number of completions in one completion stream response. */
  def countCompletions(response: CompletionStreamResponse): Int =
    response.completions.length

  /** Total number of events across all flat transactions in the response. */
  def countFlatTransactionsEvents(response: GetTransactionsResponse): Long =
    response.transactions.foldLeft(0L)((acc, tx) => acc + tx.events.size)

  /** Total number of events across all transaction trees in the response. */
  def countTreeTransactionsEvents(response: GetTransactionTreesResponse): Long =
    response.transactions.foldLeft(0L)((acc, tx) => acc + tx.eventsById.size)

  // A max-total-runtime metric is created only when the config declares a limit.
  private def optionalMaxDurationMetrics[T](
      configO: Option[CommonObjectivesConfig]
  ): List[Metric[T]] = {
    for {
      config <- configO
      maxRuntime <- config.maxTotalStreamRuntimeDuration
    } yield createTotalRuntimeMetric[T](maxRuntime)
  }.toList

  /** Metric that fails the benchmark when the stream runs longer than `maxRuntime`. */
  def createTotalRuntimeMetric[T](maxRuntime: FiniteDuration): Metric[T] =
    TotalRuntimeMetric.empty(
      clock = Clock.systemUTC(),
      startTime = Clock.systemUTC().instant(),
      objective = MaxDurationObjective(maxValue = toJavaDuration(maxRuntime)),
    )

  /** Converts a Scala FiniteDuration to a java.time.Duration (nanosecond precision). */
  protected[metrics] def toJavaDuration[T](maxStreamDuration: FiniteDuration) = {
    Duration.ofNanos(maxStreamDuration.toNanos)
  }
}

View File

@ -1,8 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
/** A pass/fail condition evaluated against values produced by a [[Metric]]. */
trait ServiceLevelObjective[MetricValueType <: MetricValue] {
  /** @return true when the observed value breaks this objective. */
  def isViolatedBy(metricValue: MetricValueType): Boolean
}

View File

@ -1,50 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.time.Duration
/** Tracks throughput in megabytes per second, averaged over reporting periods.
  *
  * @param currentSizeBytesBucket bytes accumulated in the current period
  * @param sizeRateList           MB/s rates of all completed periods (newest first)
  */
final case class SizeMetric[T](
    sizingBytesFunction: T => Long,
    currentSizeBytesBucket: Long = 0,
    sizeRateList: List[Double] = List.empty,
) extends Metric[T] {
  import SizeMetric._
  override type V = Value

  /** Adds the serialized size of the element to the current period's bucket. */
  override def onNext(value: T): SizeMetric[T] =
    copy(currentSizeBytesBucket = currentSizeBytesBucket + sizingBytesFunction(value))

  /** Emits the MB/s rate for the elapsed period and resets the bucket. */
  override def periodicValue(periodDuration: Duration): (Metric[T], Value) = {
    val rate = periodicSizeRate(periodDuration)
    // Prepending is fine: the list only ever feeds an order-insensitive mean.
    (copy(currentSizeBytesBucket = 0, sizeRateList = rate :: sizeRateList), Value(rate))
  }

  /** Mean of all periodic rates; 0.0 when no period has completed. */
  override def finalValue(totalDuration: Duration): Value =
    Value(
      if (sizeRateList.isEmpty) 0.0
      else sizeRateList.sum / sizeRateList.length
    )

  override def violatedFinalObjectives(totalDuration: Duration): List[(Objective, Value)] = Nil

  // bytes/ms -> bytes/s -> MB/s
  private def periodicSizeRate(periodDuration: Duration): Double =
    (currentSizeBytesBucket.toDouble / periodDuration.toMillis) * 1000.0 / (1024 * 1024)
}
object SizeMetric {
  /** Throughput expressed in megabytes per second. */
  final case class Value(megabytesPerSecond: Double) extends MetricValue

  /** A size metric with an empty bucket and no recorded periods. */
  def empty[T](sizingFunction: T => Long): SizeMetric[T] =
    SizeMetric[T](sizingBytesFunction = sizingFunction)
}

View File

@ -1,36 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import akka.actor.typed.{ActorSystem, SpawnProtocol}
import org.slf4j.Logger
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
object StreamMetrics {

  /** Creates a metrics manager for the stream and wraps it in a
    * [[MeteredStreamObserver]] that feeds it every observed element.
    */
  def observer[StreamElem](
      streamName: String,
      logInterval: FiniteDuration,
      metrics: List[Metric[StreamElem]],
      logger: Logger,
      exposedMetrics: Option[ExposedMetrics[StreamElem]] = None,
      itemCountingFunction: (StreamElem) => Long,
      maxItemCount: Option[Long],
  )(implicit
      system: ActorSystem[SpawnProtocol.Command],
      ec: ExecutionContext,
  ): Future[MeteredStreamObserver[StreamElem]] =
    for {
      manager <- MetricsManager.create(streamName, logInterval, metrics, exposedMetrics)
    } yield new MeteredStreamObserver[StreamElem](
      streamName = streamName,
      logger = logger,
      manager = manager,
      itemCountingFunction = itemCountingFunction,
      maxItemCount = maxItemCount,
    )
}

View File

@ -1,35 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.time.Duration
/** Counts the total number of items seen on the stream.
  *
  * @param counter   running total of items
  * @param lastCount the total as of the most recent periodic report
  */
final case class TotalCountMetric[T](
    countingFunction: T => Int,
    counter: Int = 0,
    lastCount: Int = 0,
) extends Metric[T] {
  import TotalCountMetric._
  override type V = Value

  /** Adds the element's item count to the running total. */
  override def onNext(value: T): TotalCountMetric[T] =
    copy(counter = counter + countingFunction(value))

  /** Reports the running total and remembers it as the last reported count. */
  override def periodicValue(periodDuration: Duration): (Metric[T], Value) =
    (copy(lastCount = counter), Value(counter))

  override def finalValue(totalDuration: Duration): Value =
    Value(totalCount = counter)

  override def violatedFinalObjectives(totalDuration: Duration): List[(Objective, Value)] = Nil
}
object TotalCountMetric {
  /** Total number of items observed since the start of the stream. */
  final case class Value(totalCount: Int) extends MetricValue

  /** A total-count metric starting from zero. */
  def empty[T](countingFunction: T => Int): TotalCountMetric[T] =
    TotalCountMetric[T](countingFunction = countingFunction)
}

View File

@ -1,70 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics.metrics
import java.time.{Clock, Duration, Instant}
import com.daml.ledger.api.benchtool.metrics.{Metric, MetricValue, ServiceLevelObjective}
import com.daml.ledger.api.benchtool.metrics.metrics.TotalRuntimeMetric.{
MaxDurationObjective,
Value,
}
import com.daml.ledger.api.benchtool.util.TimeUtil
object TotalRuntimeMetric {

  /** Objective: the total stream runtime must not exceed `maxValue`. */
  case class MaxDurationObjective(maxValue: Duration) extends ServiceLevelObjective[Value] {
    override def isViolatedBy(value: Value): Boolean = value.v.compareTo(maxValue) > 0
  }

  /** Observed total runtime of the stream. */
  case class Value(v: Duration) extends MetricValue

  /** A runtime metric that has not yet observed any item. */
  def empty[T](
      clock: Clock,
      startTime: Instant,
      objective: MaxDurationObjective,
  ): TotalRuntimeMetric[T] =
    TotalRuntimeMetric[T](clock, startTime, objective)
}
/** Measures the total runtime since the set start time to the time of receiving the most recent item.
*/
/** Measures the total runtime since the set start time to the time of receiving the most recent item.
  */
case class TotalRuntimeMetric[T](
    clock: Clock,
    startTime: Instant,
    objective: MaxDurationObjective,
) extends Metric[T] {
  override type V = Value
  override type Objective = MaxDurationObjective

  // NOTE: There's no need to synchronize on this variable
  // as this metric used solely as an internal state of an actor at 'com.daml.ledger.api.benchtool.metrics.MetricsCollector.handlingMessages'
  private var lastSeenItemTime: Instant = startTime

  /** Records the current time as the latest item arrival; returns itself
    * (state is held in the mutable field, unlike the other immutable metrics).
    */
  override def onNext(item: T): Metric[T] = {
    lastSeenItemTime = clock.instant()
    this
  }

  override def periodicValue(periodDuration: Duration): (Metric[T], Value) =
    this -> totalRuntime

  override def finalValue(totalDuration: Duration): Value =
    totalRuntime

  /** Reports the objective if the observed runtime exceeds the configured maximum. */
  override def violatedFinalObjectives(
      totalDuration: Duration
  ): List[(MaxDurationObjective, Value)] =
    if (objective.isViolatedBy(totalRuntime))
      List((objective, totalRuntime))
    else
      List.empty

  // Elapsed time from the configured start to the most recently seen item.
  private def totalRuntime: Value = Value(TimeUtil.durationBetween(startTime, lastSeenItemTime))
}

View File

@ -1,51 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.active_contracts_service._
import io.grpc.Channel
import org.slf4j.LoggerFactory
import scala.concurrent.Future
/** Client for the active contracts service: streams the ACS into an observer. */
final class ActiveContractsService(
    channel: Channel,
    ledgerId: String,
    authorizationToken: Option[String],
) {
  private val logger = LoggerFactory.getLogger(getClass)

  // Stub optionally wrapped with the authorization token.
  private val service: ActiveContractsServiceGrpc.ActiveContractsServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      ActiveContractsServiceGrpc.stub(channel)
    )

  /** Starts streaming active contracts into the observer and returns its result
    * future; fails immediately when the configured filters are invalid.
    */
  def getActiveContracts[Result](
      config: WorkflowConfig.StreamConfig.ActiveContractsStreamConfig,
      observer: ObserverWithResult[GetActiveContractsResponse, Result],
  ): Future[Result] =
    getActiveContractsRequest(ledgerId, config).fold(
      error => Future.failed(new RuntimeException(error)),
      request => {
        service.getActiveContracts(request, observer)
        logger.info("Started fetching active contracts")
        observer.result
      },
    )

  // Builds the request, propagating any filter-translation error.
  private def getActiveContractsRequest(
      ledgerId: String,
      config: WorkflowConfig.StreamConfig.ActiveContractsStreamConfig,
  ): Either[String, GetActiveContractsRequest] =
    StreamFilters
      .transactionFilters(config.filters)
      .map(filters =>
        GetActiveContractsRequest.defaultInstance
          .withLedgerId(ledgerId)
          .withFilter(filters)
      )
}

View File

@ -1,62 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.command_completion_service.{
CommandCompletionServiceGrpc,
CompletionStreamRequest,
CompletionStreamResponse,
}
import io.grpc.Channel
import org.slf4j.LoggerFactory
import scala.concurrent.Future
/** Client for the command completion service: streams completions into an observer. */
class CommandCompletionService(
    channel: Channel,
    ledgerId: String,
    userId: String,
    authorizationToken: Option[String],
) {
  private val logger = LoggerFactory.getLogger(getClass)

  // Stub optionally wrapped with the authorization token.
  private val service: CommandCompletionServiceGrpc.CommandCompletionServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      CommandCompletionServiceGrpc.stub(channel)
    )

  /** Starts streaming completions into the observer and returns its result future. */
  def completions[Result](
      config: WorkflowConfig.StreamConfig.CompletionsStreamConfig,
      observer: ObserverWithResult[CompletionStreamResponse, Result],
  ): Future[Result] = {
    service.completionStream(completionsRequest(ledgerId, config), observer)
    logger.info(s"Started fetching completions")
    observer.result
  }

  // Builds the stream request; with user-based auth the applicationId must match the userId.
  private def completionsRequest(
      ledgerId: String,
      config: WorkflowConfig.StreamConfig.CompletionsStreamConfig,
  ): CompletionStreamRequest = {
    if (authorizationToken.isDefined) {
      assert(
        userId == config.applicationId,
        s"When using user based authorization applicationId (${config.applicationId}) must be equal to userId ($userId)",
      )
    }
    val base = CompletionStreamRequest.defaultInstance
      .withLedgerId(ledgerId)
      .withParties(config.parties)
      .withApplicationId(config.applicationId)
    // Attach the starting offset only when one was configured.
    config.beginOffset.fold(base)(base.withOffset)
  }
}

View File

@ -1,30 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.command_service._
import com.daml.ledger.api.v1.commands.Commands
import com.google.protobuf.empty.Empty
import io.grpc.Channel
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal
/** Client for the synchronous command service. */
class CommandService(channel: Channel, authorizationToken: Option[String]) {
  private val logger = LoggerFactory.getLogger(getClass)

  // Stub optionally wrapped with the authorization token.
  private val service: CommandServiceGrpc.CommandServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(CommandServiceGrpc.stub(channel))

  /** Submits the commands and waits for completion; failures are logged
    * before being propagated to the caller.
    */
  def submitAndWait(commands: Commands)(implicit ec: ExecutionContext): Future[Empty] = {
    val request = new SubmitAndWaitRequest(Some(commands))
    service.submitAndWait(request).recoverWith { case NonFatal(ex) =>
      logger.error(s"Command submission error. Details: ${ex.getLocalizedMessage}", ex)
      Future.failed(ex)
    }
  }
}

View File

@ -1,34 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.command_submission_service._
import com.daml.ledger.api.v1.commands.Commands
import com.google.protobuf.empty.Empty
import io.grpc.Channel
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal
/** Client for the asynchronous command submission service. */
class CommandSubmissionService(channel: Channel, authorizationToken: Option[String]) {
  private val logger = LoggerFactory.getLogger(getClass)

  // Stub optionally wrapped with the authorization token.
  private val service: CommandSubmissionServiceGrpc.CommandSubmissionServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      CommandSubmissionServiceGrpc.stub(channel)
    )

  /** Submits the commands; failures are logged before being propagated to the caller. */
  def submit(commands: Commands)(implicit ec: ExecutionContext): Future[Empty] = {
    val request = new SubmitRequest(Some(commands))
    service.submit(request).recoverWith { case NonFatal(ex) =>
      logger.error(s"Command submission error. Details: ${ex.getLocalizedMessage}", ex)
      Future.failed(ex)
    }
  }
}

View File

@ -1,70 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.platform.localstore.api.UserManagementStore
import io.grpc.Channel
import scala.concurrent.{ExecutionContext, Future}
/** Bundles all Ledger API service clients for one channel, ledger and user.
  *
  * A token is minted once per userId (when an authorization helper is present)
  * and shared by all the service clients below.
  */
class LedgerApiServices(
    channel: Channel,
    val ledgerId: String,
    userId: String,
    authorizationHelper: Option[AuthorizationHelper],
) {
  // Must be initialized before the service vals below, all of which capture it.
  private val authorizationToken: Option[String] = authorizationHelper.map(_.tokenFor(userId))

  val activeContractsService =
    new ActiveContractsService(channel, ledgerId, authorizationToken = authorizationToken)
  val commandService = new CommandService(channel, authorizationToken = authorizationToken)
  val commandSubmissionService =
    new CommandSubmissionService(channel, authorizationToken = authorizationToken)
  val commandCompletionService =
    new CommandCompletionService(
      channel,
      ledgerId,
      userId = userId,
      authorizationToken = authorizationToken,
    )
  val packageManagementService =
    new PackageManagementService(channel, authorizationToken = authorizationToken)
  val pruningService = new PruningService(channel, authorizationToken = authorizationToken)
  val packageService = new PackageService(channel, authorizationToken = authorizationToken)
  val partyManagementService =
    new PartyManagementService(channel, authorizationToken = authorizationToken)
  val transactionService =
    new TransactionService(channel, ledgerId, authorizationToken = authorizationToken)
  val userManagementService = new UserManagementService(channel, authorizationToken)
}
object LedgerApiServices {

  /** @return factory function for creating optionally authorized services for a given userId
    */
  def forChannel(
      authorizationHelper: Option[AuthorizationHelper],
      channel: Channel,
  )(implicit ec: ExecutionContext): Future[String => LedgerApiServices] = {
    // The ledger identity is fetched once, as the participant admin user.
    val adminToken =
      authorizationHelper.map(_.tokenFor(UserManagementStore.DefaultParticipantAdminUserId))
    val identityService: LedgerIdentityService =
      new LedgerIdentityService(channel = channel, authorizationToken = adminToken)
    for {
      ledgerId <- identityService.fetchLedgerId()
    } yield (userId: String) =>
      new LedgerApiServices(
        channel,
        ledgerId,
        userId = userId,
        authorizationHelper = authorizationHelper,
      )
  }
}

View File

@ -1,49 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.ledger_identity_service.{
GetLedgerIdentityRequest,
LedgerIdentityServiceGrpc,
}
import io.grpc.Channel
import org.slf4j.LoggerFactory
import scala.annotation.nowarn
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
/** Thin wrapper over the ledger-identity gRPC service, used only to discover the ledger id. */
final class LedgerIdentityService(channel: Channel, authorizationToken: Option[String]) {

  private val logger = LoggerFactory.getLogger(getClass)

  // The entire ledger-identity API is deprecated upstream; the ascribed @nowarn
  // silences exactly that deprecation and nothing else.
  private val service =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      LedgerIdentityServiceGrpc.stub(channel)
    ): @nowarn(
      "cat=deprecation&origin=com\\.daml\\.ledger\\.api\\.v1\\.ledger_identity_service\\..*"
    )

  /** Fetches the ledger id, logging success or failure; the returned future
    * fails with the original exception on error.
    */
  @nowarn("cat=deprecation&origin=com\\.daml\\.ledger\\.api\\.v1\\.ledger_identity_service\\..*")
  def fetchLedgerId()(implicit ec: ExecutionContext): Future[String] =
    service
      .getLedgerIdentity(
        new GetLedgerIdentityRequest()
      )
      .transformWith {
        case Success(response) =>
          Future.successful {
            logger.info(s"Fetched ledger ID: ${response.ledgerId}")
            response.ledgerId
          }
        case Failure(exception) =>
          Future.failed {
            logger.error(
              s"Error during fetching of the ledger id. Details: ${exception.getLocalizedMessage}",
              exception,
            )
            exception
          }
      }
}

View File

@ -1,29 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.admin.package_management_service.{
PackageManagementServiceGrpc,
UploadDarFileRequest,
}
import com.google.protobuf.ByteString
import io.grpc.Channel
import scala.concurrent.{ExecutionContext, Future}
/** Uploads DAR archives through the package-management admin API. */
class PackageManagementService(channel: Channel, authorizationToken: Option[String]) {

  private val service =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      PackageManagementServiceGrpc.stub(channel)
    )

  /** Uploads a DAR file and discards the (empty) response payload. */
  def uploadDar(bytes: ByteString, submissionId: String)(implicit
      ec: ExecutionContext
  ): Future[Unit] = {
    val request = new UploadDarFileRequest(bytes, submissionId)
    service.uploadDarFile(request).map(_ => ())
  }
}

View File

@ -1,23 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.package_service._
import io.grpc.Channel
import scala.concurrent.Future
/** Read-only access to the ledger's package service. */
class PackageService(channel: Channel, authorizationToken: Option[String]) {

  private val service =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(PackageServiceGrpc.stub(channel))

  /** Fetches the archive payload of a single package. */
  def getPackage(packageId: String): Future[GetPackageResponse] =
    service.getPackage(GetPackageRequest(packageId = packageId))

  /** Lists the ids of all packages known to the ledger. */
  def listPackages(): Future[ListPackagesResponse] =
    service.listPackages(ListPackagesRequest())
}

View File

@ -1,50 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.admin.party_management_service.{
AllocatePartyRequest,
ListKnownPartiesRequest,
PartyManagementServiceGrpc,
}
import com.daml.ledger.client.binding.Primitive.Party
import io.grpc.Channel
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
/** Lists and allocates parties through the party-management admin API. */
class PartyManagementService(channel: Channel, authorizationToken: Option[String]) {

  private val logger: Logger = LoggerFactory.getLogger(getClass)

  private val service: PartyManagementServiceGrpc.PartyManagementServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      PartyManagementServiceGrpc.stub(channel)
    )

  /** Returns the set of party ids currently known to the participant. */
  def listKnownParties()(implicit ec: ExecutionContext): Future[Set[String]] =
    service
      .listKnownParties(new ListKnownPartiesRequest())
      .map(_.partyDetails.map(_.party).toSet)

  /** Allocates a new party using `hint` as the id hint, logging the outcome. */
  def allocateParty(hint: String)(implicit ec: ExecutionContext): Future[Party] =
    service
      .allocateParty(new AllocatePartyRequest(partyIdHint = hint))
      .transformWith {
        case Success(response) =>
          // NOTE(review): relies on partyDetails being present in the response;
          // a missing field fails the future with NoSuchElementException.
          val allocated = Party(response.partyDetails.get.party)
          logger.info(s"Allocated party: $allocated")
          Future.successful(allocated)
        case Failure(exception) =>
          logger.error(
            s"Error during party allocation. Details: ${exception.getLocalizedMessage}",
            exception,
          )
          Future.failed(exception)
      }
}

View File

@ -1,25 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.admin.participant_pruning_service.{
ParticipantPruningServiceGrpc,
PruneRequest,
PruneResponse,
}
import io.grpc.Channel
import scala.concurrent.Future
/** Wraps the participant-pruning admin API. */
class PruningService(channel: Channel, authorizationToken: Option[String]) {

  private val service: ParticipantPruningServiceGrpc.ParticipantPruningServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      ParticipantPruningServiceGrpc.stub(channel)
    )

  /** Prunes the participant up to the offset carried by `request`. */
  def prune(request: PruneRequest): Future[PruneResponse] = service.prune(request)
}

View File

@ -1,72 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.v1.transaction_filter.{
Filters,
InclusiveFilters,
InterfaceFilter,
TransactionFilter,
}
import com.daml.ledger.api.v1.value.Identifier
/** Builds gRPC [[TransactionFilter]]s from benchtool stream configuration. */
object StreamFilters {

  /** Converts per-party filter configs into a single [[TransactionFilter]].
    *
    * @return `Left` with a message for the first malformed template/interface id
    */
  def transactionFilters(
      filters: List[WorkflowConfig.StreamConfig.PartyFilter]
  ): Either[String, TransactionFilter] =
    toEitherList(filters.map(toTransactionFilter))
      .map { byPartyFilters =>
        TransactionFilter.defaultInstance.withFiltersByParty(byPartyFilters.toMap)
      }

  /** Maps one party's config to a `(party, Filters)` pair.
    * Empty template AND interface lists mean "no inclusive filter", i.e. all templates.
    */
  private def toTransactionFilter(
      filter: WorkflowConfig.StreamConfig.PartyFilter
  ): Either[String, (String, Filters)] =
    ((filter.templates, filter.interfaces) match {
      case (Nil, Nil) =>
        Right(Filters.defaultInstance)
      case (templateIds, interfaceIds) =>
        for {
          tplIds <- templateIdentifiers(templateIds)
          ifaceIds <- templateIdentifiers(interfaceIds)
        } yield {
          // Interface filters always request the interface view payload (includeInterfaceView = true).
          val interfaceFilters =
            ifaceIds.map(interfaceId => InterfaceFilter(Some(interfaceId), true))
          Filters.defaultInstance.withInclusive(
            InclusiveFilters.defaultInstance
              .addAllTemplateIds(tplIds)
              .addAllInterfaceFilters(interfaceFilters)
          )
        }
    }).map(templateFilters => filter.party -> templateFilters)

  private def templateIdentifiers(templates: List[String]): Either[String, List[Identifier]] =
    toEitherList(templates.map(templateIdFromString))

  /** Parses a `packageId:moduleName:entityName` string into an [[Identifier]]. */
  private def templateIdFromString(fullyQualifiedTemplateId: String): Either[String, Identifier] =
    fullyQualifiedTemplateId
      .split(':')
      .toList match {
      case packageId :: moduleName :: entityName :: Nil =>
        Right(
          Identifier.defaultInstance
            .withEntityName(entityName)
            .withModuleName(moduleName)
            .withPackageId(packageId)
        )
      case _ =>
        Left(s"Invalid template id: $fullyQualifiedTemplateId")
    }

  /** Sequences a list of `Either`s, returning the first `Left` encountered.
    * BUG FIX: the previous implementation only prepended during the left fold,
    * so results came back in REVERSE input order; the trailing `reverse`
    * restores the caller's order while keeping first-error semantics.
    */
  private def toEitherList[L, R](l: List[Either[L, R]]): Either[L, List[R]] =
    l.foldLeft[Either[L, List[R]]](Right(List.empty[R])) { case (acc, next) =>
      for {
        elems <- acc
        elem <- next
      } yield elem :: elems
    }.map(_.reverse)
}

View File

@ -1,100 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import com.daml.ledger.api.v1.transaction_service.{
GetLedgerEndRequest,
GetTransactionTreesResponse,
GetTransactionsRequest,
GetTransactionsResponse,
TransactionServiceGrpc,
}
import io.grpc.Channel
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
/** Streams flat transactions and transaction trees from the ledger API.
  *
  * @param channel gRPC channel the stub is built on
  * @param ledgerId ledger id stamped on every request
  * @param authorizationToken optional JWT for authorized ledgers
  */
final class TransactionService(
    channel: Channel,
    ledgerId: String,
    authorizationToken: Option[String],
) {
  private val logger = LoggerFactory.getLogger(getClass)

  private val service: TransactionServiceGrpc.TransactionServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(TransactionServiceGrpc.stub(channel))

  /** Fetches the current ledger end as an absolute offset string.
    * Fails fast (sys.error) if the response carries no offset.
    */
  def getLedgerEnd()(implicit ec: ExecutionContext): Future[String] = for {
    response <- service.getLedgerEnd(new GetLedgerEndRequest())
    ledgerOffset = response.offset.getOrElse(
      sys.error("Ledger end offset response contained no offset")
    )
  } yield ledgerOffset.getAbsolute

  /** Streams flat transactions into `observer` and returns its final result.
    * A malformed filter config fails the future without opening the stream.
    */
  def transactions[Result](
      config: WorkflowConfig.StreamConfig.TransactionsStreamConfig,
      observer: ObserverWithResult[GetTransactionsResponse, Result],
  ): Future[Result] =
    getTransactionsRequest(
      ledgerId = ledgerId,
      filters = config.filters,
      beginOffset = config.beginOffset,
      endOffset = config.endOffset,
    ) match {
      case Right(request) =>
        service.getTransactions(request, observer)
        logger.info("Started fetching transactions")
        observer.result
      case Left(error) =>
        Future.failed(new RuntimeException(error))
    }

  /** Streams transaction trees into `observer` and returns its final result. */
  def transactionTrees[Result](
      config: WorkflowConfig.StreamConfig.TransactionTreesStreamConfig,
      observer: ObserverWithResult[
        GetTransactionTreesResponse,
        Result,
      ],
  ): Future[Result] =
    getTransactionsRequest(
      ledgerId = ledgerId,
      filters = config.filters,
      beginOffset = config.beginOffset,
      endOffset = config.endOffset,
    ) match {
      case Right(request) =>
        service.getTransactionTrees(request, observer)
        logger.info("Started fetching transaction trees")
        observer.result
      case Left(error) =>
        Future.failed(new RuntimeException(error))
    }

  // Builds the request: begin defaults to LEDGER_BEGIN, and the end stays unset
  // (an unbounded stream) when no end offset is configured.
  private def getTransactionsRequest(
      ledgerId: String,
      filters: List[WorkflowConfig.StreamConfig.PartyFilter],
      beginOffset: Option[LedgerOffset],
      endOffset: Option[LedgerOffset],
  ): Either[String, GetTransactionsRequest] =
    StreamFilters
      .transactionFilters(filters)
      .map { filters =>
        val getTransactionsRequest = GetTransactionsRequest.defaultInstance
          .withLedgerId(ledgerId)
          .withBegin(beginOffset.getOrElse(ledgerBeginOffset))
          .withFilter(filters)
        endOffset match {
          case Some(end) => getTransactionsRequest.withEnd(end)
          case None => getTransactionsRequest
        }
      }

  private def ledgerBeginOffset: LedgerOffset =
    LedgerOffset().withBoundary(LedgerOffset.LedgerBoundary.LEDGER_BEGIN)
}

View File

@ -1,84 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.services
import com.daml.error.definitions.LedgerApiErrors
import com.daml.error.utils.ErrorDetails
import com.daml.ledger.api.benchtool.AuthorizationHelper
import com.daml.ledger.api.v1.admin.user_management_service.{
CreateUserRequest,
GrantUserRightsRequest,
User,
UserManagementServiceGrpc,
Right => UserRight,
}
import io.grpc.{Channel, StatusRuntimeException}
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.{ExecutionContext, Future}
/** Creates benchmark users and grants them act-as / read-as rights. */
class UserManagementService(channel: Channel, authorizationToken: Option[String]) {

  private val logger: Logger = LoggerFactory.getLogger(getClass)

  private val service: UserManagementServiceGrpc.UserManagementServiceStub =
    AuthorizationHelper.maybeAuthedService(authorizationToken)(
      UserManagementServiceGrpc.stub(channel)
    )

  /** Creates the user with the required rights; if the user already exists,
    * grants the same rights to the existing user instead.
    */
  def createUserOrGrantRightsToExisting(
      userId: String,
      observerPartyNames: Seq[String],
      signatoryPartyName: String,
  )(implicit ec: ExecutionContext): Future[Unit] = {
    val requiredRights = userRights(observerPartyNames, signatoryPartyName)
    createUser(userId, requiredRights).recoverWith {
      case e: StatusRuntimeException
          if ErrorDetails.matches(e, LedgerApiErrors.Admin.UserManagement.UserAlreadyExists) =>
        logger.info(
          s"Benchmark user already exists (received error: ${e.getStatus.getDescription}) so granting rights the existing user."
        )
        grantUserRights(userId, requiredRights)
    }
  }

  private def createUser(
      userId: String,
      rights: Seq[UserRight],
  )(implicit ec: ExecutionContext): Future[Unit] = {
    logger.info(s"Creating a user: '$userId' with rights: ${rights.mkString(", ")}")
    val request = CreateUserRequest(
      user = Some(User(id = userId, primaryParty = "")),
      rights = rights,
    )
    service.createUser(request).map(_ => ())
  }

  private def grantUserRights(
      userId: String,
      rights: Seq[UserRight],
  )(implicit ec: ExecutionContext): Future[Unit] = {
    logger.info(s"Granting rights: ${rights.mkString(", ")} to the user: $userId")
    val request = GrantUserRightsRequest(
      userId = userId,
      rights = rights,
    )
    service.grantUserRights(request).map(_ => ())
  }

  // One can-act-as right for the signatory, then can-read-as for every observer.
  private def userRights(
      observerPartyNames: Seq[String],
      signatoryPartyName: String,
  ): Seq[UserRight] = {
    val readRights = observerPartyNames.map { observerPartyName =>
      UserRight(UserRight.Kind.CanReadAs(UserRight.CanReadAs(observerPartyName)))
    }
    UserRight(UserRight.Kind.CanActAs(UserRight.CanActAs(signatoryPartyName))) +: readRights
  }
}

View File

@ -1,49 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com
package daml.ledger.api.benchtool.submission
import daml.ledger.api.v1.value.Value
import scala.collection.mutable
/** Keeps track of contract keys of contracts that haven't been used up (archived) yet.
  * Allows to select the next contract key to use up at random.
  */
final class ActiveContractKeysPool(randomnessProvider: RandomnessProvider) {

  // One depleting pool of keys per template name.
  private val poolsPerTemplate = mutable.Map.empty[String, DepletingUniformRandomPool[Value]]

  /** Removes and returns a uniformly random key for `templateName`.
    * Throws (like Map.apply) when no pool exists for that template.
    */
  def getAndRemoveContractKey(templateName: String): Value = synchronized {
    poolsPerTemplate(templateName).pop()
  }

  /** Registers a key, lazily creating the template's pool on first use. */
  def addContractKey(templateName: String, key: Value): Unit = synchronized {
    poolsPerTemplate
      .getOrElseUpdate(templateName, new DepletingUniformRandomPool(randomnessProvider))
      .put(key)
  }
}
/** A pool of elements supporting two operations:
  * 1. pop() - select an element uniformly at random and remove it from the pool.
  * 2. put() - add an element to the pool
  *
  * Elements are inserted at uniformly random positions, so removing the final
  * element yields a uniformly random pick.
  */
final class DepletingUniformRandomPool[V](randomnessProvider: RandomnessProvider) {
  private val elements = mutable.ArrayBuffer.empty[V]

  def pop(): V = {
    // Read before removing so an empty pool fails the same way `last` does.
    val picked = elements.last
    elements.remove(index = elements.size - 1, count = 1)
    picked
  }

  def put(v: V): Unit = {
    val position = randomnessProvider.randomNatural(elements.size + 1)
    elements.insert(index = position, elem = v)
  }
}

View File

@ -1,55 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.client.binding.Primitive
/** The parties allocated (or discovered) for a benchmark run, grouped by role. */
case class AllocatedParties(
    signatoryO: Option[Primitive.Party],
    observers: List[Primitive.Party],
    divulgees: List[Primitive.Party],
    extraSubmitters: List[Primitive.Party],
    observerPartySets: List[AllocatedPartySet],
) {

  /** Every allocated party across all roles, including party-set members. */
  val allAllocatedParties: List[Primitive.Party] = {
    val partySetMembers = observerPartySets.flatMap(_.parties)
    signatoryO.toList ::: observers ::: divulgees ::: extraSubmitters ::: partySetMembers
  }

  /** NOTE: This is guaranteed to be safe only for runs with synthetic data generated by Benchtool
    */
  def signatory: Primitive.Party =
    signatoryO.getOrElse(sys.error("Signatory party not found!"))
}
object AllocatedParties {

  /** @param partyPrefixesForPartySets - get converted to main party prefixes and then used to find party sets
    */
  def forExistingParties(
      parties: List[String],
      partyPrefixesForPartySets: List[String],
  ): AllocatedParties = {
    // Bucket all known party names by their main prefix (e.g. signatory/observer).
    val partiesByMainPrefixMap: Map[String, List[Primitive.Party]] =
      parties
        .groupBy(Names.parsePartyNameMainPrefix)
        .view
        .mapValues(_.map(Primitive.Party(_)))
        .toMap

    // A party set is materialized only for prefixes that matched some parties.
    val observerPartySets =
      partyPrefixesForPartySets
        .map(Names.parsePartyNameMainPrefix)
        .flatMap { partySetPrefix =>
          partiesByMainPrefixMap
            .get(partySetPrefix)
            .map(matched =>
              AllocatedPartySet(mainPartyNamePrefix = partySetPrefix, parties = matched)
            )
        }

    AllocatedParties(
      // NOTE: For synthetic streams signatory is always present
      signatoryO = partiesByMainPrefixMap.getOrElse(Names.SignatoryPrefix, List.empty).headOption,
      observers = partiesByMainPrefixMap.getOrElse(Names.ObserverPrefix, List.empty),
      divulgees = partiesByMainPrefixMap.getOrElse(Names.DivulgeePrefix, List.empty),
      extraSubmitters = partiesByMainPrefixMap.getOrElse(Names.ExtraSubmitterPrefix, List.empty),
      observerPartySets = observerPartySets,
    )
  }
}

View File

@ -1,20 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.client.binding.Primitive
import scalaz.syntax.tag._
/** A named set of parties whose names all share `mainPartyNamePrefix`.
  * Construction fails (IllegalArgumentException) if any party violates the prefix.
  */
case class AllocatedPartySet(
    mainPartyNamePrefix: String,
    parties: List[Primitive.Party],
) {
  locally {
    val offenders = parties.filterNot(_.unwrap.startsWith(mainPartyNamePrefix))
    require(
      offenders.isEmpty,
      s"All party names in party-set '$mainPartyNamePrefix' must start with prefix $mainPartyNamePrefix. Found offenders: $offenders",
    )
  }
}

View File

@ -1,20 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.test.benchtool.Foo.Foo1
import scalaz.syntax.tag._
/** Identifies the Daml package that contains the benchtool test templates. */
case class BenchtoolTestsPackageInfo(
    packageId: String
)
object BenchtoolTestsPackageInfo {
  // Name of the dar containing the benchtool test templates.
  val BenchtoolTestsPackageName = "benchtool-tests"

  // The packageId obtained from the compiled Scala bindings
  val StaticDefault: BenchtoolTestsPackageInfo =
    BenchtoolTestsPackageInfo(packageId = Foo1.id.unwrap.packageId)
}

View File

@ -1,17 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.v1.commands.Command
import com.daml.ledger.client.binding.Primitive
import scala.util.Try
/** Produces command batches for submission, one batch per `next()` call. */
trait CommandGenerator {

  /** @return the next batch of commands, or a failure when generation breaks */
  def next(): Try[Seq[Command]]

  /** @return the application id to attribute the next submission to */
  def nextApplicationId(): String

  /** @return extra parties that should co-submit the next command batch */
  def nextExtraCommandSubmitters(): List[Primitive.Party]
}

View File

@ -1,288 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import akka.stream.{Materializer, OverflowStrategy}
import com.daml.ledger.api.benchtool.config.WorkflowConfig.SubmissionConfig
import com.daml.ledger.api.benchtool.infrastructure.TestDars
import com.daml.ledger.api.benchtool.metrics.LatencyMetric.LatencyNanos
import com.daml.ledger.api.benchtool.metrics.MetricsManager
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.api.v1.commands.{Command, Commands}
import com.daml.ledger.client
import com.daml.ledger.client.binding.Primitive
import com.daml.ledger.resources.{ResourceContext, ResourceOwner}
import com.daml.metrics.api.MetricHandle.{MetricsFactory, Timer}
import com.daml.metrics.api.MetricName
import io.grpc.Status
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import scala.annotation.nowarn
import scala.concurrent.duration.DurationInt
import scala.concurrent.{ExecutionContext, Future}
import scala.util.chaining._
import scala.util.control.NonFatal
/** Prepares (parties + dars) and submits generated command batches to the ledger,
  * recording one submission-latency sample per batch.
  *
  * @param names                        deterministic name/id generator (commands, dars, workflow)
  * @param benchtoolUserServices        services authorized as the benchmark user (used to submit)
  * @param adminServices                admin-authorized services (used for dar upload)
  * @param partyAllocating              allocates/discovers the parties used by generated commands
  * @param metricsFactory               source of the submission-latency timer
  * @param metricsManager               receives one latency sample per submitted batch
  * @param waitForSubmission            when true, uses submit-and-wait and names the timer accordingly
  * @param commandGenerationParallelism parallelism of the command-generation stage
  * @param maxInFlightCommandsOverride  optional override of the submission parallelism
  */
@nowarn("cat=deprecation")
case class CommandSubmitter(
    names: Names,
    benchtoolUserServices: LedgerApiServices,
    adminServices: LedgerApiServices,
    partyAllocating: PartyAllocating,
    metricsFactory: MetricsFactory,
    metricsManager: MetricsManager[LatencyNanos],
    waitForSubmission: Boolean,
    commandGenerationParallelism: Int = 8,
    maxInFlightCommandsOverride: Option[Int] = None,
) {
  private val logger = LoggerFactory.getLogger(getClass)

  // The timer name reflects whether latency covers full submit-and-wait round trips.
  private val submitLatencyTimer = if (waitForSubmission) {
    metricsFactory.timer(MetricName("daml_submit_and_wait_latency"))
  } else {
    metricsFactory.timer(MetricName("daml_submit_latency"))
  }

  /** Allocates parties and uploads the test dars; returns the allocated parties.
    * Any failure is wrapped in [[CommandSubmitter.CommandSubmitterError]].
    */
  def prepare(config: SubmissionConfig)(implicit
      ec: ExecutionContext
  ): Future[AllocatedParties] = {
    logger.info(s"Identifier suffix: ${names.identifierSuffix}")
    (for {
      allocatedParties <- partyAllocating.allocateParties(config)
      _ <- uploadTestDars()
    } yield allocatedParties)
      .recoverWith { case NonFatal(ex) =>
        logger.error(
          s"Command submission preparation failed. Details: ${ex.getLocalizedMessage}",
          ex,
        )
        Future.failed(CommandSubmitter.CommandSubmitterError(ex.getLocalizedMessage, ex))
      }
  }

  /** Submits one command batch synchronously (always submit-and-wait). */
  def submitSingleBatch(
      commandId: String,
      actAs: Seq[Primitive.Party],
      commands: Seq[Command],
  )(implicit
      ec: ExecutionContext
  ): Future[Unit] = {
    submit(
      id = commandId,
      actAs = actAs,
      commands = commands,
      applicationId = names.benchtoolApplicationId,
      useSubmitAndWait = true,
    )
  }

  /** Generates `config.numberOfInstances` contracts and submits them in batches. */
  def generateAndSubmit(
      generator: CommandGenerator,
      config: SubmissionConfig,
      baseActAs: List[client.binding.Primitive.Party],
      maxInFlightCommands: Int,
      submissionBatchSize: Int,
  )(implicit ec: ExecutionContext): Future[Unit] = {
    logger.info("Generating contracts...")
    (for {
      _ <- submitCommands(
        generator = generator,
        config = config,
        maxInFlightCommands = maxInFlightCommands,
        submissionBatchSize = submissionBatchSize,
        baseActAs = baseActAs,
      )
    } yield {
      logger.info("Commands submitted successfully.")
      ()
    })
      .recoverWith { case NonFatal(ex) =>
        logger.error(s"Command submission failed. Details: ${ex.getLocalizedMessage}", ex)
        Future.failed(CommandSubmitter.CommandSubmitterError(ex.getLocalizedMessage, ex))
      }
  }

  private def uploadDar(dar: TestDars.DarFile, submissionId: String)(implicit
      ec: ExecutionContext
  ): Future[Unit] =
    adminServices.packageManagementService.uploadDar(
      bytes = dar.bytes,
      submissionId = submissionId,
    )

  // Uploads all bundled test dars, one submission id per dar.
  private def uploadTestDars()(implicit ec: ExecutionContext): Future[Unit] = {
    logger.info("Uploading dars...")
    for {
      dars <- Future.delegate { Future.fromTry(TestDars.readAll()) }
      _ <- Future.sequence {
        dars.zipWithIndex
          .map { case (dar, index) =>
            uploadDar(dar, names.darId(index))
          }
      }
    } yield {
      // BUG FIX: log message previously read "Uplading dars completed".
      logger.info("Uploading dars completed")
    }
  }

  // Single submission entry point; picks fire-and-forget submit or submit-and-wait.
  private def submit(
      id: String,
      actAs: Seq[Primitive.Party],
      commands: Seq[Command],
      applicationId: String,
      useSubmitAndWait: Boolean,
  )(implicit
      ec: ExecutionContext
  ): Future[Unit] = {
    def makeCommands(commands: Seq[Command]) = new Commands(
      ledgerId = benchtoolUserServices.ledgerId,
      applicationId = applicationId,
      commandId = id,
      actAs = actAs.map(_.unwrap),
      commands = commands,
      workflowId = names.workflowId,
    )
    (if (useSubmitAndWait) {
       benchtoolUserServices.commandService.submitAndWait(makeCommands(commands))
     } else {
       benchtoolUserServices.commandSubmissionService.submit(makeCommands(commands))
     }).map(_ => ())
  }

  // Drives the akka-stream pipeline: generate -> batch -> submit with bounded
  // in-flight parallelism, logging progress along the way. ABORTED responses
  // (rate limiting) are retried-over with a small back-off; other failures abort.
  private def submitCommands(
      generator: CommandGenerator,
      config: SubmissionConfig,
      baseActAs: List[Primitive.Party],
      maxInFlightCommands: Int,
      submissionBatchSize: Int,
  )(implicit
      ec: ExecutionContext
  ): Future[Unit] = {
    implicit val resourceContext: ResourceContext = ResourceContext(ec)
    val numBatches: Int = config.numberOfInstances / submissionBatchSize
    val progressMeter = CommandSubmitter.ProgressMeter(config.numberOfInstances)
    // Output a log line roughly once per 10% progress, or once every 10000 submissions (whichever comes first)
    val progressLogInterval = math.min(config.numberOfInstances / 10 + 1, 10000)
    val progressLoggingSink = {
      var lastInterval = 0
      Sink.foreach[Int](index =>
        if (index / progressLogInterval != lastInterval) {
          lastInterval = index / progressLogInterval
          logger.info(progressMeter.getProgress(index))
        }
      )
    }
    logger.info(
      s"Submitting commands ($numBatches commands, $submissionBatchSize contracts per command)..."
    )
    materializerOwner()
      .use { implicit materializer =>
        for {
          _ <- Source
            .fromIterator(() => (1 to config.numberOfInstances).iterator)
            .wireTap(i => if (i == 1) progressMeter.start())
            .mapAsync(commandGenerationParallelism)(index =>
              Future.fromTry(
                generator.next().map(cmd => index -> cmd)
              )
            )
            .groupedWithin(submissionBatchSize, 1.minute)
            .map(cmds => cmds.head._1 -> cmds.map(_._2).toList)
            .buffer(maxInFlightCommands, OverflowStrategy.backpressure)
            .mapAsync(maxInFlightCommandsOverride.getOrElse(maxInFlightCommands)) {
              case (index, commands) =>
                timed(submitLatencyTimer, metricsManager) {
                  submit(
                    id = names.commandId(index),
                    actAs = baseActAs ++ generator.nextExtraCommandSubmitters(),
                    commands = commands.flatten,
                    applicationId = generator.nextApplicationId(),
                    useSubmitAndWait = config.waitForSubmission,
                  )
                }
                  .map(_ => index + commands.length - 1)
                  .recoverWith {
                    case e: io.grpc.StatusRuntimeException
                        if e.getStatus.getCode == Status.Code.ABORTED =>
                      logger.info(s"Flow rate limited at index $index: ${e.getLocalizedMessage}")
                      Thread.sleep(10) // Small back-off period
                      Future.successful(index + commands.length - 1)
                    case ex =>
                      logger.error(
                        s"Command submission failed. Details: ${ex.getLocalizedMessage}",
                        ex,
                      )
                      Future.failed(
                        CommandSubmitter.CommandSubmitterError(ex.getLocalizedMessage, ex)
                      )
                  }
            }
            .runWith(progressLoggingSink)
        } yield ()
      }
  }

  // Dedicated actor system + materializer scoped to one submission run.
  private def materializerOwner(): ResourceOwner[Materializer] = {
    for {
      actorSystem <- ResourceOwner.forActorSystem(() => ActorSystem("CommandSubmissionSystem"))
      materializer <- ResourceOwner.forMaterializer(() => Materializer(actorSystem))
    } yield materializer
  }

  // Runs `f`, timing it and reporting the elapsed nanoseconds to the metrics manager.
  private def timed[O](timer: Timer, metricsManager: MetricsManager[LatencyNanos])(
      f: => Future[O]
  )(implicit ec: ExecutionContext) = {
    val ctx = timer.startAsync()
    val startNanos = System.nanoTime()
    f.map(_.tap { _ =>
      ctx.stop()
      val endNanos = System.nanoTime()
      metricsManager.sendNewValue(endNanos - startNanos)
    })
  }
}
object CommandSubmitter {

  /** Wraps any failure raised during command preparation or submission. */
  case class CommandSubmitterError(msg: String, cause: Throwable)
      extends RuntimeException(msg, cause)

  case class SubmissionSummary(observers: List[Primitive.Party])

  /** Tracks and renders submission progress, estimating the remaining time
    * from the throughput observed since `start()`.
    */
  class ProgressMeter(totalItems: Int) {
    var startTimeMillis: Long = System.currentTimeMillis()

    def start(): Unit = {
      startTimeMillis = System.currentTimeMillis()
    }

    def getProgress(index: Int): String =
      f"Progress: $index/$totalItems (${percentage(index)}%1.1f%%). Elapsed time: ${elapsedSeconds}%1.1f s. Remaining time: ${remainingSeconds(index)}%1.1f s"

    private def percentage(index: Int): Double = 100 * (index.toDouble / totalItems)

    private def elapsedSeconds: Double =
      (System.currentTimeMillis() - startTimeMillis).toDouble / 1000

    private def remainingSeconds(index: Int): Double = {
      val itemsLeft = totalItems - index
      if (itemsLeft <= 0) 0.0
      else {
        val secondsPerItem: Double = elapsedSeconds / index
        itemsLeft * secondsPerItem
      }
    }
  }

  object ProgressMeter {
    def apply(totalItems: Int) = new ProgressMeter(totalItems = totalItems)
  }
}

View File

@ -1,24 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
/** Allows to pseudo-randomly pick an index out of a set of indices according to their weights. */
class Distribution[T](weights: List[Int], items: IndexedSeq[T]) {
  assert(weights.nonEmpty, "Weights list must not be empty.")
  assert(weights.size == items.size, "The number of weights and items must be the same.")
  assert(!weights.exists(_ < 1), "Weights must be strictly positive.")

  private val totalWeight: Long = weights.map(_.toLong).sum

  // Cumulative distribution in (0, 1]; entry i is the probability mass of items 0..i.
  // BUG FIX: accumulate the running sum as Long (as `totalWeight` already does) so
  // partial sums no longer overflow Int when the weights sum past Int.MaxValue.
  private val distribution: List[Double] =
    weights.scanLeft(0L)((sum, weight) => sum + weight).map(_.toDouble / totalWeight).tail

  /** Picks the item whose cumulative-probability bucket contains `randomDouble`. */
  def choose(randomDouble: Double): T = items(index(randomDouble))

  private[submission] def index(randomDouble: Double): Int = {
    assert(randomDouble < 1.0, "Given random double must be < 1.0.")
    // Consider changing implementation to use binary search when using on large lists.
    distribution.indexWhere(_ > randomDouble)
  }
}

View File

@ -1,62 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FibonacciSubmissionConfig
import com.daml.ledger.api.v1.commands.{Command, CreateAndExerciseCommand}
import com.daml.ledger.api.v1.value.{Identifier, Record, RecordField, Value}
import com.daml.ledger.client.binding.Primitive
import com.daml.ledger.test.benchtool.Bench.InefficientFibonacci.toNamedArguments
import scala.util.{Success, Try}
/** Generates one create-and-exercise command per call that computes a Fibonacci
  * number on-ledger via the InefficientFibonacci test template.
  *
  * @param config carries the Fibonacci input value
  * @param signatory party that signs the created contract
  * @param names supplies the benchtool application id
  */
final class FibonacciCommandGenerator(
    config: FibonacciSubmissionConfig,
    signatory: Primitive.Party,
    names: Names,
) extends CommandGenerator {

  override def nextApplicationId(): String = names.benchtoolApplicationId

  // Fibonacci submissions never add extra submitters.
  override def nextExtraCommandSubmitters(): List[Primitive.Party] = List.empty

  /** Always succeeds: builds a single InefficientFibonacci_Compute command
    * whose `value` choice argument comes from the config.
    */
  def next(): Try[Seq[Command]] = {
    Success(
      Seq(
        Command(
          Command.Command.CreateAndExercise(
            CreateAndExerciseCommand(
              templateId = Some(
                com.daml.ledger.test.benchtool.Bench.InefficientFibonacci.id
                  .asInstanceOf[Identifier]
              ),
              createArguments = Some(
                toNamedArguments(
                  com.daml.ledger.test.benchtool.Bench.InefficientFibonacci(signatory)
                )
              ),
              choice = "InefficientFibonacci_Compute",
              choiceArgument = Some(
                Value(
                  Value.Sum.Record(
                    Record(
                      None,
                      Seq(
                        RecordField(
                          label = "value",
                          value = Some(Value(Value.Sum.Int64(config.value.toLong))),
                        )
                      ),
                    )
                  )
                )
              ),
            )
          )
        )
      )
    )
  }
}

View File

@ -1,326 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import java.util.concurrent.atomic.AtomicLong
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FooSubmissionConfig
import com.daml.ledger.api.benchtool.submission.foo.RandomPartySelecting
import com.daml.ledger.api.v1.commands.{Command, ExerciseByKeyCommand}
import com.daml.ledger.api.v1.value.{Identifier, Record, RecordField, Value}
import com.daml.ledger.client.binding
import com.daml.ledger.client.binding.Primitive
import com.daml.ledger.test.benchtool.Foo._
import scala.util.control.NonFatal
import scala.util.{Failure, Try}
/** @param divulgeesToDivulgerKeyMap map whose keys are sorted divulgees lists
*/
final class FooCommandGenerator(
config: FooSubmissionConfig,
allocatedParties: AllocatedParties,
divulgeesToDivulgerKeyMap: Map[Set[Primitive.Party], Value],
names: Names,
partySelecting: RandomPartySelecting,
randomnessProvider: RandomnessProvider,
) extends CommandGenerator {
private val activeContractKeysPool = new ActiveContractKeysPool(randomnessProvider)
private val contractDescriptions = new Distribution[FooSubmissionConfig.ContractDescription](
weights = config.instanceDistribution.map(_.weight),
items = config.instanceDistribution.toIndexedSeq,
)
private val applicationIdsDistributionO: Option[Distribution[FooSubmissionConfig.ApplicationId]] =
Option.when(config.applicationIds.nonEmpty)(
new Distribution(
weights = config.applicationIds.map(_.weight),
items = config.applicationIds.toIndexedSeq,
)
)
override def next(): Try[Seq[Command]] =
(for {
(contractDescription, partySelection) <- Try(
(
pickContractDescription(),
partySelecting.nextPartiesForContracts(),
)
)
divulgees = partySelection.divulgees.toSet
createContractPayload <- Try(randomPayload(contractDescription.payloadSizeBytes))
command = createCommands(
templateDescriptor = FooTemplateDescriptor.forName(contractDescription.template),
signatory = allocatedParties.signatory,
observers = partySelection.observers,
divulgerContractKeyO =
if (divulgees.isEmpty) None else divulgeesToDivulgerKeyMap.get(divulgees),
payload = createContractPayload,
)
} yield command).recoverWith { case NonFatal(ex) =>
Failure(
FooCommandGenerator.CommandGeneratorError(
msg = s"Command generation failed. Details: ${ex.getLocalizedMessage}",
cause = ex,
)
)
}
override def nextApplicationId(): String = {
applicationIdsDistributionO.fold(
names.benchtoolApplicationId
)(applicationIdsDistribution =>
applicationIdsDistribution
.choose(randomnessProvider.randomDouble())
.applicationId
)
}
override def nextExtraCommandSubmitters(): List[Primitive.Party] = {
partySelecting.nextExtraSubmitter()
}
private def pickContractDescription(): FooSubmissionConfig.ContractDescription =
contractDescriptions.choose(randomnessProvider.randomDouble())
private def createCommands(
templateDescriptor: FooTemplateDescriptor,
signatory: Primitive.Party,
observers: List[Primitive.Party],
divulgerContractKeyO: Option[Value],
payload: String,
): Seq[Command] = {
val contractCounter = FooCommandGenerator.nextContractNumber.getAndIncrement()
val fooKeyId = "foo-" + contractCounter
val fooContractKey = FooCommandGenerator.makeContractKeyValue(signatory, fooKeyId)
// Create events
val createFooCmd = divulgerContractKeyO match {
case Some(divulgerContractKey) =>
makeCreateAndDivulgeFooCommand(
divulgerContractKey = divulgerContractKey,
payload = payload,
fooKeyId = fooKeyId,
observers = observers,
templateName = templateDescriptor.name,
)
case None =>
templateDescriptor.name match {
case "Foo1" => Foo1(signatory, observers, payload, keyId = fooKeyId).create.command
case "Foo2" => Foo2(signatory, observers, payload, keyId = fooKeyId).create.command
case "Foo3" => Foo3(signatory, observers, payload, keyId = fooKeyId).create.command
}
}
if (config.allowNonTransientContracts) {
activeContractKeysPool.addContractKey(templateDescriptor.name, fooContractKey)
}
// Non-consuming events
val nonconsumingExercises: Seq[Command] = makeNonConsumingExerciseCommands(
templateDescriptor = templateDescriptor,
fooContractKey = fooContractKey,
)
// Consuming events
val consumingPayloadO: Option[String] = config.consumingExercises
.flatMap(config =>
if (randomnessProvider.randomDouble() <= config.probability) {
Some(randomPayload(config.payloadSizeBytes))
} else None
)
val consumingExerciseO: Option[Command] = consumingPayloadO.map { payload =>
val selectedActiveFooContractKey = {
if (config.allowNonTransientContracts) {
// This can choose at random a key of any the previously generated contracts.
activeContractKeysPool.getAndRemoveContractKey(templateDescriptor.name)
} else {
// This is always the key of the contract created in this batch of commands.
fooContractKey
}
}
divulgerContractKeyO match {
case Some(divulgerContractKey) =>
makeDivulgedConsumeExerciseCommand(
templateDescriptor = templateDescriptor,
fooContractKey = selectedActiveFooContractKey,
payload = payload,
divulgerContractKey = divulgerContractKey,
)
case None =>
makeExerciseByKeyCommand(
templateId = templateDescriptor.templateId,
choiceName = templateDescriptor.consumingChoiceName,
args = Seq(
RecordField(
label = "exercisePayload",
value = Some(Value(Value.Sum.Text(payload))),
)
),
)(contractKey = selectedActiveFooContractKey)
}
}
Seq(createFooCmd) ++ nonconsumingExercises ++ consumingExerciseO.toList
}
private def makeDivulgedConsumeExerciseCommand(
templateDescriptor: FooTemplateDescriptor,
fooContractKey: Value,
payload: String,
divulgerContractKey: Value,
): Command = {
makeExerciseByKeyCommand(
templateId = FooTemplateDescriptor.Divulger_templateId,
choiceName = FooTemplateDescriptor.Divulger_DivulgeConsumingExercise,
args = Seq(
RecordField(
label = "fooTemplateName",
value = Some(Value(Value.Sum.Text(templateDescriptor.name))),
),
RecordField(
label = "fooKey",
value = Some(fooContractKey),
),
RecordField(
label = "fooConsumingPayload",
value = Some(Value(Value.Sum.Text(payload))),
),
),
)(contractKey = divulgerContractKey)
}
private def makeNonConsumingExerciseCommands(
templateDescriptor: FooTemplateDescriptor,
fooContractKey: Value,
): Seq[Command] = {
val nonconsumingExercisePayloads: Seq[String] =
config.nonConsumingExercises.fold(Seq.empty[String]) { config =>
var f = config.probability.toInt
if (randomnessProvider.randomDouble() <= config.probability - f) {
f += 1
}
Seq.fill[String](f)(randomPayload(config.payloadSizeBytes))
}
val nonconsumingExercises = nonconsumingExercisePayloads.map { payload =>
makeExerciseByKeyCommand(
templateId = templateDescriptor.templateId,
choiceName = templateDescriptor.nonconsumingChoiceName,
args = Seq(
RecordField(
label = "exercisePayload",
value = Some(Value(Value.Sum.Text(payload))),
)
),
)(contractKey = fooContractKey)
}
nonconsumingExercises
}
private def makeCreateAndDivulgeFooCommand(
divulgerContractKey: Value,
payload: String,
fooKeyId: String,
observers: List[Primitive.Party],
templateName: String,
) = {
makeExerciseByKeyCommand(
templateId = FooTemplateDescriptor.Divulger_templateId,
choiceName = FooTemplateDescriptor.Divulger_DivulgeContractImmediate,
args = Seq(
RecordField(
label = "fooObservers",
value = Some(
Value(
Value.Sum.List(
com.daml.ledger.api.v1.value.List(
observers.map(obs => Value(Value.Sum.Party(obs.toString)))
)
)
)
),
),
RecordField(
label = "fooPayload",
value = Some(Value(Value.Sum.Text(payload))),
),
RecordField(
label = "fooKeyId",
value = Some(Value(Value.Sum.Text(fooKeyId))),
),
RecordField(
label = "fooTemplateName",
value = Some(Value(Value.Sum.Text(templateName))),
),
),
)(contractKey = divulgerContractKey)
}
def makeExerciseByKeyCommand(templateId: Identifier, choiceName: String, args: Seq[RecordField])(
contractKey: Value
): Command = {
val choiceArgument = Some(
Value(
Value.Sum.Record(
Record(
None,
args,
)
)
)
)
val c: Command = Command(
command = Command.Command.ExerciseByKey(
ExerciseByKeyCommand(
templateId = Some(templateId),
contractKey = Some(contractKey),
choice = choiceName,
choiceArgument = choiceArgument,
)
)
)
c
}
private def randomPayload(sizeBytes: Int): String =
FooCommandGenerator.randomPayload(randomnessProvider, sizeBytes)
}
object FooCommandGenerator {
private[submission] val nextContractNumber = new AtomicLong(0)
/** @return A DAML tuple of type `(Party, Text)`
*/
private[submission] def makeContractKeyValue(
party: binding.Primitive.Party,
keyId: String,
): Value = {
Value(
Value.Sum.Record(
Record(
None,
Seq(
RecordField(
value = Some(Value(Value.Sum.Party(party.toString)))
),
RecordField(
value = Some(Value(Value.Sum.Text(keyId)))
),
),
)
)
)
}
case class CommandGeneratorError(msg: String, cause: Throwable)
extends RuntimeException(msg, cause)
private[submission] def randomPayload(
randomnessProvider: RandomnessProvider,
sizeBytes: Int,
): String = {
randomnessProvider.randomAsciiString(sizeBytes)
}
}

View File

@ -1,67 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.v1.commands.Command
import com.daml.ledger.api.v1.value.Value
import com.daml.ledger.client.binding.Primitive
import com.daml.ledger.test.benchtool.Foo.Divulger
object FooDivulgerCommandGenerator {
/** Builds a create Divulger command for each non-empty subset of divulgees
* such that the created Divulger contract can be used to divulge (by immediate divulgence) Foo1, Foo2 or Foo3 contracts
* to the corresponding subset of divulgees.
*
* @param allDivulgees - Small number of divulgees. At most 5.
* @return A tuple of:
* - a sequence of create Divulger commands,
* - a map from sets of divulgees (all non-empty subsets of all divulgees) to corresponding contract keys,
*/
def makeCreateDivulgerCommands(
divulgingParty: Primitive.Party,
allDivulgees: List[Primitive.Party],
): (List[Command], Map[Set[Primitive.Party], Value]) = {
require(
allDivulgees.size <= 5,
s"Number of divulgee parties must be at most 5, was: ${allDivulgees.size}.",
)
def allNonEmptySubsets(divulgees: List[Primitive.Party]): List[List[Primitive.Party]] = {
def iter(remaining: List[Primitive.Party]): List[List[Primitive.Party]] = {
remaining match {
case Nil => List(List.empty)
case head :: tail =>
val sub: List[List[Primitive.Party]] = iter(tail)
val sub2: List[List[Primitive.Party]] = sub.map(xs => xs.prepended(head))
sub ::: sub2
}
}
import scalaz.syntax.tag._
iter(divulgees)
.collect {
case parties if parties.nonEmpty => parties.sortBy(_.unwrap)
}
}
def createDivulgerFor(divulgees: List[Primitive.Party]): (Command, Value) = {
val keyId = "divulger-" + FooCommandGenerator.nextContractNumber.getAndIncrement()
val createDivulgerCmd = Divulger(
divulgees = divulgees,
divulger = divulgingParty,
keyId = keyId,
).create.command
val divulgerKey: Value = FooCommandGenerator.makeContractKeyValue(divulgingParty, keyId)
(createDivulgerCmd, divulgerKey)
}
val allSubsets = allNonEmptySubsets(allDivulgees)
val (commands, keys, divulgeeSets) = allSubsets.map { divulgees: List[Primitive.Party] =>
val (cmd, key) = createDivulgerFor(divulgees)
(cmd, key, divulgees.toSet)
}.unzip3
val divulgeesToContractKeysMap = divulgeeSets.zip(keys).toMap
(commands, divulgeesToContractKeysMap)
}
}

View File

@ -1,67 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FooSubmissionConfig
import com.daml.ledger.api.benchtool.submission.foo.RandomPartySelecting
import scala.concurrent.{ExecutionContext, Future}
class FooSubmission(
submitter: CommandSubmitter,
maxInFlightCommands: Int,
submissionBatchSize: Int,
allocatedParties: AllocatedParties,
names: Names,
randomnessProvider: RandomnessProvider,
) {
def performSubmission(submissionConfig: FooSubmissionConfig)(implicit
ec: ExecutionContext
): Future[Unit] = {
val (divulgerCmds, divulgeesToDivulgerKeyMap) = FooDivulgerCommandGenerator
.makeCreateDivulgerCommands(
divulgingParty = allocatedParties.signatory,
allDivulgees = allocatedParties.divulgees,
)
val partySelecting =
new RandomPartySelecting(
config = submissionConfig,
allocatedParties = allocatedParties,
randomnessProvider = randomnessProvider,
)
for {
_ <-
if (divulgerCmds.nonEmpty) {
require(
divulgeesToDivulgerKeyMap.nonEmpty,
"Map from divulgees to Divulger contract keys must be non empty.",
)
submitter.submitSingleBatch(
commandId = "divulgence-setup",
actAs = Seq(allocatedParties.signatory) ++ allocatedParties.divulgees,
commands = divulgerCmds,
)
} else {
Future.unit
}
generator: CommandGenerator = new FooCommandGenerator(
config = submissionConfig,
divulgeesToDivulgerKeyMap = divulgeesToDivulgerKeyMap,
names = names,
allocatedParties = allocatedParties,
partySelecting = partySelecting,
randomnessProvider = randomnessProvider,
)
_ <- submitter
.generateAndSubmit(
generator = generator,
config = submissionConfig,
baseActAs = List(allocatedParties.signatory) ++ allocatedParties.divulgees,
maxInFlightCommands = maxInFlightCommands,
submissionBatchSize = submissionBatchSize,
)
} yield ()
}
}

View File

@ -1,48 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.v1.value.Identifier
case class FooTemplateDescriptor(
name: String,
templateId: Identifier,
consumingChoiceName: String,
nonconsumingChoiceName: String,
)
/** NOTE: Keep me in sync with `Foo.daml`
*/
object FooTemplateDescriptor {
val Foo1: FooTemplateDescriptor = FooTemplateDescriptor(
name = "Foo1",
templateId = com.daml.ledger.test.benchtool.Foo.Foo1.id.asInstanceOf[Identifier],
consumingChoiceName = "Foo1_ConsumingChoice",
nonconsumingChoiceName = "Foo1_NonconsumingChoice",
)
val Foo2: FooTemplateDescriptor = FooTemplateDescriptor(
name = "Foo2",
templateId = com.daml.ledger.test.benchtool.Foo.Foo2.id.asInstanceOf[Identifier],
consumingChoiceName = "Foo2_ConsumingChoice",
nonconsumingChoiceName = "Foo2_NonconsumingChoice",
)
val Foo3: FooTemplateDescriptor = FooTemplateDescriptor(
name = "Foo3",
templateId = com.daml.ledger.test.benchtool.Foo.Foo3.id.asInstanceOf[Identifier],
consumingChoiceName = "Foo3_ConsumingChoice",
nonconsumingChoiceName = "Foo3_NonconsumingChoice",
)
private val all: Map[String, FooTemplateDescriptor] =
List(Foo1, Foo2, Foo3).map(foo => foo.name -> foo).toMap
def forName(templateName: String): FooTemplateDescriptor =
all.getOrElse(templateName, sys.error(s"Invalid template: $templateName"))
val Divulger_templateId: Identifier =
com.daml.ledger.test.benchtool.Foo.Divulger.id.asInstanceOf[Identifier]
val Divulger_DivulgeContractImmediate = "DivulgeContractImmediate"
val Divulger_DivulgeConsumingExercise = "DivulgeConsumingExercise"
}

View File

@ -1,92 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
/** Collects identifiers used by the benchtool in a single place.
*/
class Names {
import Names.{
SignatoryPrefix,
PartyPrefixSeparatorChar,
ObserverPrefix,
DivulgeePrefix,
ExtraSubmitterPrefix,
}
val identifierSuffix = f"${System.nanoTime}%x"
val benchtoolApplicationId = "benchtool"
val benchtoolUserId: String = benchtoolApplicationId
val workflowId = s"$benchtoolApplicationId-$identifierSuffix"
val signatoryPartyName = s"$SignatoryPrefix$PartyPrefixSeparatorChar$identifierSuffix"
def observerPartyNames(numberOfObservers: Int, uniqueParties: Boolean): Seq[String] =
partyNames(ObserverPrefix, numberOfObservers, uniqueParties)
def divulgeePartyNames(numberOfDivulgees: Int, uniqueParties: Boolean): Seq[String] =
partyNames(DivulgeePrefix, numberOfDivulgees, uniqueParties)
def extraSubmitterPartyNames(numberOfExtraSubmitters: Int, uniqueParties: Boolean): Seq[String] =
partyNames(
ExtraSubmitterPrefix,
numberOfExtraSubmitters,
uniqueParties,
padPartyIndexWithLeadingZeroes = true,
)
def partySetPartyName(prefix: String, numberOfParties: Int, uniqueParties: Boolean): Seq[String] =
partyNames(
prefix = prefix,
numberOfParties = numberOfParties,
uniqueParties = uniqueParties,
// Padding the party names with leading zeroes makes it more convenient to construct requests based on a party prefix.
// For example, if we have 1000 parties in a party set, we can use prefix 'Party-1' to match precisely the parties {Party-100, Party-101, .., Party-199}
padPartyIndexWithLeadingZeroes = true,
)
def commandId(index: Int): String = s"command-$index-$identifierSuffix"
def darId(index: Int) = s"submission-dars-$index-$identifierSuffix"
private def partyNames(
prefix: String,
numberOfParties: Int,
uniqueParties: Boolean,
padPartyIndexWithLeadingZeroes: Boolean = false,
): Seq[String] = {
val largestIndex = numberOfParties - 1
val paddingTargetLength = largestIndex.toString.length
def indexToString(i: Int): String =
if (padPartyIndexWithLeadingZeroes) {
padLeftWithZeroes(i, paddingTargetLength)
} else {
i.toString
}
(0 until numberOfParties).map(i => partyName(prefix, indexToString(i), uniqueParties))
}
private def padLeftWithZeroes(i: Int, len: Int): String = {
val iText = i.toString
"0" * (len - iText.length) + iText
}
private def partyName(baseName: String, index: String, uniqueParties: Boolean): String =
s"$baseName$PartyPrefixSeparatorChar$index" + (if (uniqueParties) identifierSuffix else "")
}
object Names {
protected val PartyPrefixSeparatorChar: Char = '-'
val SignatoryPrefix = "signatory"
val ObserverPrefix = "Obs"
val DivulgeePrefix = "Div"
val ExtraSubmitterPrefix = "Sub"
/** @return main prefix of a party which is the prefix up to the first '-' character
*/
def parsePartyNameMainPrefix(partyName: String): String = {
partyName.split(Names.PartyPrefixSeparatorChar)(0)
}
}

View File

@ -1,97 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.config.WorkflowConfig.SubmissionConfig
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.client
import com.daml.ledger.client.binding.Primitive
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
class PartyAllocating(
names: Names,
adminServices: LedgerApiServices,
) {
private val logger = LoggerFactory.getLogger(getClass)
def allocateParties(config: SubmissionConfig)(implicit
ec: ExecutionContext
): Future[AllocatedParties] = {
val observerPartyNames =
names.observerPartyNames(config.numberOfObservers, config.uniqueParties)
val divulgeePartyNames =
names.divulgeePartyNames(config.numberOfDivulgees, config.uniqueParties)
val extraSubmittersPartyNames =
names.extraSubmitterPartyNames(config.numberOfExtraSubmitters, config.uniqueParties)
val observersPartySetParties: Map[String, List[String]] = {
config.observerPartySets.map { partySet =>
val parties = names
.partySetPartyName(
prefix = partySet.partyNamePrefix,
numberOfParties = partySet.count,
uniqueParties = config.uniqueParties,
)
.toList
partySet.partyNamePrefix -> parties
}.toMap
}
logger.info("Allocating parties...")
for {
known <- lookupExistingParties()
signatory <- allocateSignatoryParty(known)
observers <- allocateParties(observerPartyNames, known)
divulgees <- allocateParties(divulgeePartyNames, known)
extraSubmitters <- allocateParties(extraSubmittersPartyNames, known)
partySetNames = observersPartySetParties.keys
partySetParties: Map[String, List[client.binding.Primitive.Party]] <- Future
.sequence(partySetNames.map { partySetName =>
allocateParties(observersPartySetParties(partySetName), known).map(partySetName -> _)
})
.map(_.toMap)
} yield {
logger.info("Allocating parties completed")
AllocatedParties(
signatoryO = Some(signatory),
observers = observers,
divulgees = divulgees,
extraSubmitters = extraSubmitters,
observerPartySets = partySetParties.view.map { case (partyName, parties) =>
AllocatedPartySet(
mainPartyNamePrefix = partyName,
parties = parties,
)
}.toList,
)
}
}
def lookupExistingParties()(implicit ec: ExecutionContext): Future[Set[String]] = {
adminServices.partyManagementService.listKnownParties()
}
private def allocateSignatoryParty(known: Set[String])(implicit
ec: ExecutionContext
): Future[Primitive.Party] =
lookupOrAllocateParty(names.signatoryPartyName, known)
private def allocateParties(partyNames: Seq[String], known: Set[String])(implicit
ec: ExecutionContext
): Future[List[Primitive.Party]] = {
Future.traverse(partyNames.toList)(lookupOrAllocateParty(_, known))
}
private def lookupOrAllocateParty(party: String, known: Set[String])(implicit
ec: ExecutionContext
): Future[Primitive.Party] = {
if (known.contains(party)) {
logger.info(s"Found known party: $party")
Future.successful(Primitive.Party(party))
} else
adminServices.partyManagementService.allocateParty(party)
}
}

View File

@ -1,31 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
trait RandomnessProvider {
def randomDouble(): Double // 0.0 <= randomDouble() < 1.0
/** Guarantees that each character will take exactly one byte in UTF-8.
*/
def randomAsciiString(n: Int): String
def randomNatural(n: Int): Int // 0 <= randomNatural(n) < n
}
object RandomnessProvider {
object Default extends Seeded(System.currentTimeMillis())
def forSeed(seed: Long) = new Seeded(seed = seed)
class Seeded(seed: Long) extends RandomnessProvider {
private val r = new scala.util.Random(seed)
override def randomDouble(): Double = r.nextDouble()
override def randomNatural(n: Int): Int = r.nextInt(n)
override def randomAsciiString(n: Int): String = {
val buffer = new StringBuilder(n)
0.until(n).foreach { _ =>
buffer.append(r.nextInt(127).toChar)
}
buffer.toString()
}
}
}

View File

@ -1,11 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission.foo
import com.daml.ledger.client.binding.Primitive
case class PartiesSelection(
observers: List[Primitive.Party],
divulgees: List[Primitive.Party],
)

View File

@ -1,60 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission.foo
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FooSubmissionConfig
import com.daml.ledger.api.benchtool.submission.{AllocatedParties, RandomnessProvider}
import com.daml.ledger.client
import com.daml.ledger.client.binding.Primitive
class RandomPartySelecting(
config: FooSubmissionConfig,
allocatedParties: AllocatedParties,
randomnessProvider: RandomnessProvider,
) {
private val observersProbability = probabilitiesByPartyIndex(allocatedParties.observers)
private val divulgeesProbability = probabilitiesByPartyIndex(allocatedParties.divulgees)
private val extraSubmittersProbability = probabilitiesByPartyIndex(
allocatedParties.extraSubmitters
)
private val observerPartySetPartiesProbability: List[(client.binding.Primitive.Party, Double)] =
allocatedParties.observerPartySets.flatMap { partySet =>
val visibility = config.observerPartySets
.find(_.partyNamePrefix == partySet.mainPartyNamePrefix)
.fold(
sys.error(
s"Could not find visibility for party set ${partySet.mainPartyNamePrefix} in the submission config"
)
)(_.visibility)
partySet.parties.map(party => party -> visibility)
}
def nextPartiesForContracts(): PartiesSelection = {
PartiesSelection(
observers =
pickParties(observersProbability) ++ pickParties(observerPartySetPartiesProbability),
divulgees = pickParties(divulgeesProbability),
)
}
def nextExtraSubmitter(): List[Primitive.Party] = pickParties(extraSubmittersProbability)
private def pickParties(probabilities: List[(Primitive.Party, Double)]): List[Primitive.Party] =
probabilities
.collect { case (party, probability) if randomBoolean(probability) => party }
private def randomBoolean(truthProbability: Double): Boolean =
randomnessProvider.randomDouble() <= truthProbability
private def probabilitiesByPartyIndex(
orderedParties: List[Primitive.Party]
): List[(Primitive.Party, Double)] =
orderedParties.zipWithIndex.toMap.view.mapValues(probabilityBaseTen).toList
/** @return probability of a 1/(10**i)
*/
private def probabilityBaseTen(i: Int): Double = math.pow(10.0, -i.toDouble)
}

View File

@ -1,62 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.util
import com.daml.error.definitions.CommonErrors
import com.daml.error.utils.ErrorDetails
import io.grpc.stub.{ClientCallStreamObserver, ClientResponseObserver}
import org.slf4j.Logger
import scala.concurrent.{Future, Promise}
object ClientCancelled extends Exception
abstract class ObserverWithResult[RespT, Result](logger: Logger)
extends ClientResponseObserver[Any, RespT] {
private var requestStream: ClientCallStreamObserver[_] = null
def streamName: String
def result: Future[Result] = promise.future
def completeWith(): Future[Result]
override def onNext(value: RespT): Unit = ()
override def onError(t: Throwable): Unit = {
logger.error(withStreamName(s"Received error: $t"))
t match {
case ex: io.grpc.StatusRuntimeException if isServerShuttingDownError(ex) =>
logger.info(s"Stopping reading the stream due to the server being shut down.")
promise.completeWith(completeWith())
case ex if ex.getCause == ClientCancelled =>
logger.info(s"Stopping reading the stream due to a client cancellation.")
promise.completeWith(completeWith())
case ex =>
promise.failure(ex)
}
}
override def beforeStart(requestStream: ClientCallStreamObserver[Any]): Unit = {
this.requestStream = requestStream
}
def cancel(): Unit = {
requestStream.cancel(null, ClientCancelled)
}
private def isServerShuttingDownError(ex: io.grpc.StatusRuntimeException): Boolean =
ErrorDetails.matches(ex, CommonErrors.ServerIsShuttingDown)
override def onCompleted(): Unit = {
logger.info(withStreamName(s"Stream completed."))
promise.completeWith(completeWith())
}
private val promise: Promise[Result] = Promise[Result]()
protected def withStreamName(message: String) = s"[$streamName] $message"
}

View File

@ -1,131 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.util
import com.daml.ledger.api.benchtool.metrics.MetricsCollector.Response.{FinalReport, PeriodicReport}
import com.daml.ledger.api.benchtool.metrics._
import com.daml.ledger.api.benchtool.metrics.metrics.TotalRuntimeMetric
object ReportFormatter {
def formatPeriodicReport(streamName: String, periodicReport: PeriodicReport): String = {
val values = periodicReport.values.map(shortMetricReport).mkString(", ")
s"[$streamName] $values"
}
def formatFinalReport(streamName: String, finalReport: FinalReport): String = {
def valueFormat(label: String, value: String): String =
s"""[$streamName][final-value] $label: $value"""
def failureFormat(info: String): String = s"""[$streamName][failure] $info"""
val reports = finalReport.metricsData.flatMap { metricData =>
val valueLog: Option[String] =
if (includeInFinalReport(metricData.value))
Some(valueFormat(metricName(metricData.value), formattedValue(metricData.value)))
else
None
val violatedObjectives: List[String] =
metricData.violatedObjectives.map { case (objective, value) =>
val info =
s"${objectiveName(objective)}: required: ${formattedObjectiveValue(objective)}, metered: ${formattedValue(value)}"
failureFormat(info)
}
valueLog.toList ::: violatedObjectives
}
val durationLog =
valueFormat("Duration [s]", s"${finalReport.totalDuration.toMillis.toDouble / 1000}")
val reportWidth = 80
val bar = "=" * reportWidth
s"""
|$bar
| BENCHMARK RESULTS: $streamName
|$bar
|$durationLog
|${reports.mkString("\n")}
|$bar""".stripMargin
}
private def includeInFinalReport(value: MetricValue): Boolean = value match {
case _: ConsumptionSpeedMetric.Value => false
case _: DelayMetric.Value => false
case _ => true
}
private def metricName(value: MetricValue): String = value match {
case _: ConsumptionSpeedMetric.Value => "Consumption speed [-]"
case _: CountRateMetric.Value => "Item rate [item/s]"
case _: DelayMetric.Value => "Mean delay [s]"
case _: SizeMetric.Value => "Size rate [MB/s]"
case _: TotalCountMetric.Value => "Total item count [item]"
case _: LatencyMetric.Value => "Average latency (millis)"
case _: TotalRuntimeMetric.Value => "Total runtime [ms]"
}
private def shortMetricReport(value: MetricValue): String =
s"${shortMetricName(value)}: ${formattedValue(value)}"
private def shortMetricName(value: MetricValue): String = value match {
case _: ConsumptionSpeedMetric.Value => "speed [-]"
case _: CountRateMetric.Value => "rate [item/s]"
case _: DelayMetric.Value => "delay [s]"
case _: SizeMetric.Value => "rate [MB/s]"
case _: TotalCountMetric.Value => "count [item]"
case _: LatencyMetric.Value => "Average latency (millis)"
case _: TotalRuntimeMetric.Value => "Total runtime [ms]"
}
private def formattedValue(value: MetricValue): String = value match {
case v: ConsumptionSpeedMetric.Value =>
s"${v.relativeSpeed.map(rounded).getOrElse("-")}"
case v: CountRateMetric.Value =>
s"${rounded(v.ratePerSecond)}"
case v: DelayMetric.Value =>
s"${v.meanDelaySeconds.getOrElse("-")}"
case v: SizeMetric.Value =>
s"${rounded(v.megabytesPerSecond)}"
case v: TotalCountMetric.Value =>
s"${v.totalCount}"
case v: LatencyMetric.Value =>
s"${v.latencyNanos / 1000000.0d}"
case v: TotalRuntimeMetric.Value =>
v.v.toMillis.toString
}
private def objectiveName(objective: ServiceLevelObjective[_]): String =
objective match {
case _: DelayMetric.MaxDelay =>
s"Maximum record time delay [s]"
case _: ConsumptionSpeedMetric.MinConsumptionSpeed =>
s"Minimum consumption speed [-]"
case _: CountRateMetric.RateObjective.MinRate =>
s"Minimum item rate [item/s]"
case _: CountRateMetric.RateObjective.MaxRate =>
s"Maximum item rate [item/s]"
case _: LatencyMetric.MaxLatency =>
"Maximum latency (millis)"
case _: TotalRuntimeMetric.MaxDurationObjective =>
"Total runtime [ms]"
}
private def formattedObjectiveValue(objective: ServiceLevelObjective[_]): String =
objective match {
case obj: DelayMetric.MaxDelay =>
obj.maxDelaySeconds.toString
case obj: ConsumptionSpeedMetric.MinConsumptionSpeed =>
obj.minSpeed.toString
case obj: CountRateMetric.RateObjective.MinRate =>
obj.minAllowedRatePerSecond.toString
case obj: CountRateMetric.RateObjective.MaxRate =>
obj.minAllowedRatePerSecond.toString
case obj: LatencyMetric.MaxLatency =>
obj.millis.toString
case obj: TotalRuntimeMetric.MaxDurationObjective =>
obj.maxValue.toMillis.toString
}
private def rounded(value: Double): String = "%.2f".format(value)
}

View File

@ -1,19 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.util
import com.google.protobuf.ByteString
import java.io.{BufferedReader, File, FileReader, Reader}
import scala.util.{Try, Using}
object SimpleFileReader {
def readResource[Result](name: String): Try[ByteString] =
Using(getClass.getClassLoader.getResourceAsStream(name))(ByteString.readFrom)
def readFile[Result](file: File)(f: Reader => Result): Try[Result] =
Using(new BufferedReader(new FileReader(file)))(f)
}

View File

@ -1,24 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.util
import com.google.protobuf.timestamp.Timestamp
import java.time.{Duration, Instant}
object TimeUtil {
def timestampToInstant(timestamp: Timestamp): Instant =
Instant.ofEpochSecond(timestamp.seconds.toLong, timestamp.nanos.toLong)
def durationBetween(before: Timestamp, after: Instant): Duration =
Duration.between(timestampToInstant(before), after)
def durationBetween(before: Instant, after: Instant): Duration =
Duration.between(before, after)
/** Returns `true` if `a` is longer or equal to `b`. */
def isAtLeast(a: Duration, b: Duration): Boolean =
a.compareTo(b) >= 0
}

View File

@ -1,35 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.util
import akka.actor.typed.{ActorSystem, Behavior, SpawnProtocol}
import akka.actor.typed.scaladsl.Behaviors
import com.daml.ledger.resources.{ResourceContext, ResourceOwner}
import com.daml.resources.{AbstractResourceOwner, ReleasableResource, Resource}
import scala.concurrent.Future
class TypedActorSystemResourceOwner[BehaviorType](
acquireActorSystem: () => ActorSystem[BehaviorType]
) extends AbstractResourceOwner[ResourceContext, ActorSystem[BehaviorType]] {
override def acquire()(implicit
context: ResourceContext
): Resource[ResourceContext, ActorSystem[BehaviorType]] =
ReleasableResource(Future(acquireActorSystem()))(system => Future(system.terminate()))
}
object TypedActorSystemResourceOwner {
def owner(): ResourceOwner[ActorSystem[SpawnProtocol.Command]] =
new TypedActorSystemResourceOwner[SpawnProtocol.Command](() =>
ActorSystem(Creator(), "Creator")
)
object Creator {
def apply(): Behavior[SpawnProtocol.Command] =
Behaviors.setup { context =>
context.log.debug(s"Starting Creator actor")
SpawnProtocol()
}
}
}

View File

@ -1,89 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import java.io.File
import com.daml.bazeltools.BazelRunfiles.rlocation
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.metrics.MetricsManager.NoOpMetricsManager
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.api.benchtool.submission.{
AllocatedParties,
CommandSubmitter,
FooSubmission,
Names,
PartyAllocating,
RandomnessProvider,
}
import com.daml.ledger.test.BenchtoolTestDar
import com.daml.lf.language.LanguageVersion
import com.daml.metrics.api.noop.NoOpMetricsFactory
import com.daml.platform.sandbox.fixture.SandboxFixture
import org.scalatest.Suite
import scala.concurrent.{ExecutionContext, Future}
/** Sandbox fixture for benchtool integration tests.
  *
  * Uploads the benchtool test DAR, widens the allowed LF language versions to
  * early-access, and provides helpers that wire up ledger API services, name
  * generation, and a deterministic command submitter.
  */
trait BenchtoolSandboxFixture extends SandboxFixture {
self: Suite =>
// DAR containing the benchtool test templates.
override protected def packageFiles: List[File] = List(
new File(rlocation(BenchtoolTestDar.path))
)
// The benchtool DAR may use early-access LF versions; allow them in the engine.
override def config = super.config.copy(
engine = super.config.engine.copy(allowedLanguageVersions = LanguageVersion.EarlyAccessVersions)
)
/** Builds (services, names, submitter) against the sandbox channel.
  * No authorization is used; all calls go through a single "someUser" service set.
  */
def benchtoolFixture()(implicit
ec: ExecutionContext
): Future[(LedgerApiServices, Names, CommandSubmitter)] = {
for {
ledgerApiServicesF <- LedgerApiServices.forChannel(
channel = channel,
authorizationHelper = None,
)
apiServices: LedgerApiServices = ledgerApiServicesF("someUser")
names = new Names()
submitter = CommandSubmitter(
names = names,
benchtoolUserServices = apiServices,
adminServices = apiServices,
metricsFactory = NoOpMetricsFactory,
metricsManager = NoOpMetricsManager(),
waitForSubmission = true,
partyAllocating = new PartyAllocating(
names = names,
adminServices = apiServices,
),
// Making command generation deterministic w.r.t. parallelism
commandGenerationParallelism = 1,
// Making command submission deterministic w.r.t. parallelism
maxInFlightCommandsOverride = Some(1),
)
} yield (
apiServices,
names,
submitter,
)
}
/** On top of [[benchtoolFixture]], allocates parties for the given submission
  * config and builds a seeded (hence reproducible) [[FooSubmission]].
  */
def benchtoolFooSubmissionFixture(
submissionConfig: WorkflowConfig.FooSubmissionConfig
)(implicit ec: ExecutionContext): Future[(LedgerApiServices, AllocatedParties, FooSubmission)] = {
for {
(apiServices, _, submitter) <- benchtoolFixture()
allocatedParties <- submitter.prepare(submissionConfig)
foo = new FooSubmission(
submitter = submitter,
maxInFlightCommands = 1,
submissionBatchSize = 1,
allocatedParties = allocatedParties,
names = new Names(),
randomnessProvider = RandomnessProvider.forSeed(seed = 0),
)
} yield (apiServices, allocatedParties, foo)
}
}

View File

@ -1,43 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.active_contracts_service.GetActiveContractsResponse
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.Future
object ActiveContractsObserver {

  /** Creates an observer restricted to the given set of expected template names. */
  def apply(expectedTemplateNames: Set[String]): ActiveContractsObserver = {
    val log = LoggerFactory.getLogger(getClass)
    new ActiveContractsObserver(log, expectedTemplateNames)
  }
}
/** Accumulates create events observed on the active contracts stream. */
class ActiveContractsObserver(logger: Logger, expectedTemplateNames: Set[String])
    extends ObserverWithResult[GetActiveContractsResponse, ObservedEvents](logger) {

  // Mutable accumulator, appended to from the stream callback.
  private val createEvents = collection.mutable.ArrayBuffer[ObservedCreateEvent]()

  override def streamName: String = "dummy-stream-name"

  override def onNext(value: GetActiveContractsResponse): Unit =
    value.activeContracts.foreach { created =>
      createEvents.addOne(ObservedCreateEvent(created, offset = value.offset))
    }

  /** ACS responses never carry exercises, hence the empty exercise list. */
  override def completeWith(): Future[ObservedEvents] =
    Future.successful(
      ObservedEvents(
        expectedTemplateNames = expectedTemplateNames,
        createEvents = createEvents.toList,
        exerciseEvents = List.empty,
      )
    )
}

View File

@ -1,50 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml
package ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.command_completion_service.CompletionStreamResponse
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.Future
object CompletionsObserver {

  /** Creates an observer backed by a logger for this class. */
  def apply(): CompletionsObserver =
    new CompletionsObserver(LoggerFactory.getLogger(getClass))
}
/** Accumulates completions observed on a command completion stream. */
class CompletionsObserver(logger: Logger)
    extends ObserverWithResult[CompletionStreamResponse, ObservedCompletions](logger) {

  // Mutable accumulator, appended to from the stream callback.
  private val completions = collection.mutable.ArrayBuffer[ObservedCompletion]()

  override def streamName: String = "dummy-stream-name"

  override def onNext(value: CompletionStreamResponse): Unit =
    value.completions.foreach { completion =>
      completions.addOne(
        ObservedCompletion(
          applicationId = completion.applicationId,
          actAs = completion.actAs,
        )
      )
    }

  override def completeWith(): Future[ObservedCompletions] =
    Future.successful(ObservedCompletions(completions = completions.toList))
}
/** A single completion as seen on the stream: application id plus acting parties. */
case class ObservedCompletion(applicationId: String, actAs: Seq[String])
/** All completions collected from a completion stream. */
case class ObservedCompletions(
completions: Seq[ObservedCompletion]
)

View File

@ -1,45 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.transaction_service.GetTransactionsResponse
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.Future
object FlatEventsObserver {

  /** Creates an observer restricted to the given set of expected template names. */
  def apply(expectedTemplateNames: Set[String]): FlatEventsObserver = {
    val log = LoggerFactory.getLogger(getClass)
    new FlatEventsObserver(expectedTemplateNames = expectedTemplateNames, logger = log)
  }
}
/** Accumulates create events found in flat transaction streams. */
class FlatEventsObserver(expectedTemplateNames: Set[String], logger: Logger)
    extends ObserverWithResult[GetTransactionsResponse, ObservedEvents](logger) {

  // Mutable accumulator, appended to from the stream callback.
  private val createEvents = collection.mutable.ArrayBuffer[ObservedCreateEvent]()

  override def streamName: String = "dummy-stream-name"

  override def onNext(value: GetTransactionsResponse): Unit =
    value.transactions.foreach { transaction =>
      transaction.events.foreach { event =>
        // Flat streams expose only created/archived; only creates are recorded here.
        event.event.created.foreach { created =>
          createEvents.addOne(ObservedCreateEvent(created, offset = transaction.offset))
        }
      }
    }

  override def completeWith(): Future[ObservedEvents] =
    Future.successful(
      ObservedEvents(
        expectedTemplateNames = expectedTemplateNames,
        createEvents = createEvents.toList,
      )
    )
}

View File

@ -1,30 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
/** Summary of a create event as observed on a stream.
  *
  * @param templateName entity name of the created contract's template
  * @param createArgumentsSerializedSize serialized size of the create arguments, in bytes
  * @param interfaceViews observed interface views attached to the event
  * @param offset ledger offset at which the event was observed
  * @param contractId id of the created contract
  */
case class ObservedCreateEvent(
templateName: String,
createArgumentsSerializedSize: Int,
interfaceViews: Seq[ObservedInterfaceView],
offset: String,
contractId: String,
)
object ObservedCreateEvent {

  /** Projects a ledger API created event into its observed summary.
    * Fails fast if the event carries no template id.
    */
  def apply(
      created: com.daml.ledger.api.v1.event.CreatedEvent,
      offset: String,
  ): ObservedCreateEvent = {
    val templateId =
      created.templateId.getOrElse(sys.error(s"Expected templateId in $created"))
    ObservedCreateEvent(
      templateName = templateId.entityName,
      createArgumentsSerializedSize = created.createArguments.fold(0)(_.serializedSize),
      interfaceViews = created.interfaceViews.map(ObservedInterfaceView.apply),
      offset = offset,
      contractId = created.contractId,
    )
  }
}

View File

@ -1,59 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
/** Aggregated view of the events observed on a stream.
  *
  * Fails construction (via `require`) if any observed template name is not in
  * `expectedTemplateNames`.
  *
  * @param expectedTemplateNames all template names that may legally appear
  * @param createEvents observed create events
  * @param exerciseEvents observed exercise events (consuming and non-consuming)
  */
case class ObservedEvents(
    expectedTemplateNames: Set[String],
    createEvents: Seq[ObservedCreateEvent],
    exerciseEvents: Seq[ObservedExerciseEvent] = List.empty,
) {
  private val _actualTemplateNames =
    (createEvents.map(_.templateName) ++ exerciseEvents.map(_.templateName)).toSet
  require(
    _actualTemplateNames.subsetOf(expectedTemplateNames),
    s"${_actualTemplateNames} must be a subset of $expectedTemplateNames",
  )

  val consumingExercises: Seq[ObservedExerciseEvent] = exerciseEvents.filter(_.consuming)
  val nonConsumingExercises: Seq[ObservedExerciseEvent] = exerciseEvents.filterNot(_.consuming)

  // Averages use truncating integer division, matching the original behavior.
  val avgSizeOfConsumingExercise: Int =
    averageOrZero(consumingExercises.map(_.choiceArgumentsSerializedSize))

  val avgSizeOfNonconsumingExercise: Int =
    averageOrZero(nonConsumingExercises.map(_.choiceArgumentsSerializedSize))

  val numberOfCreatesPerTemplateName: Map[String, Int] =
    countsPerExpectedTemplate(createEvents.map(_.templateName))

  val numberOfConsumingExercisesPerTemplateName: Map[String, Int] =
    countsPerExpectedTemplate(consumingExercises.map(_.templateName))

  val numberOfNonConsumingExercisesPerTemplateName: Map[String, Int] =
    countsPerExpectedTemplate(nonConsumingExercises.map(_.templateName))

  /** Average create-argument size per expected template name; 0 for absent templates. */
  val avgSizeOfCreateEventPerTemplateName: Map[String, Int] = {
    val groups = createEvents.groupBy(_.templateName)
    expectedTemplateNames.map { name =>
      name -> averageOrZero(groups.getOrElse(name, Seq.empty).map(_.createArgumentsSerializedSize))
    }.toMap
  }

  /** Truncated integer mean; 0 for an empty sequence (the original repeated this guard
    * inline, including a dead empty-group check inside `groupBy` results).
    */
  private def averageOrZero(sizes: Seq[Int]): Int =
    if (sizes.isEmpty) 0 else sizes.sum / sizes.size

  /** Occurrence count per expected template name, 0 when a template never appeared.
    * Shared by the three "number of X per template" aggregations above.
    */
  private def countsPerExpectedTemplate(names: Seq[String]): Map[String, Int] = {
    val counts = names.groupBy(identity).view.mapValues(_.size).toMap
    expectedTemplateNames.map(name => name -> counts.getOrElse(name, 0)).toMap
  }
}

View File

@ -1,34 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
/** Summary of an exercise event as observed on a stream.
  *
  * @param templateName entity name of the exercised contract's template
  * @param choiceName name of the exercised choice
  * @param choiceArgumentsSerializedSize serialized size of the choice argument, in bytes
  * @param consuming whether the exercise consumed the contract
  * @param offset ledger offset at which the event was observed
  * @param contractId id of the exercised contract
  */
case class ObservedExerciseEvent(
templateName: String,
choiceName: String,
choiceArgumentsSerializedSize: Int,
consuming: Boolean,
offset: String,
contractId: String,
)
object ObservedExerciseEvent {

  /** Projects a ledger API exercised event into its observed summary.
    * Fails fast if the event carries no template id.
    */
  def apply(
      exercised: com.daml.ledger.api.v1.event.ExercisedEvent,
      offset: String,
  ): ObservedExerciseEvent = {
    val templateId =
      exercised.templateId.getOrElse(sys.error(s"Expected templateId in $exercised"))
    ObservedExerciseEvent(
      templateName = templateId.entityName,
      choiceName = exercised.choice,
      choiceArgumentsSerializedSize = exercised.choiceArgument.fold(0)(_.serializedSize),
      consuming = exercised.consuming,
      offset = offset,
      contractId = exercised.contractId,
    )
  }
}

View File

@ -1,16 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
/** Observed summary of an interface view: interface name and payload size in bytes. */
case class ObservedInterfaceView(interfaceName: String, serializedSize: Int)

object ObservedInterfaceView {

  /** Projects a ledger API interface view into its observed summary.
    * Fails fast if the view carries no interface id.
    */
  def apply(interfaceView: com.daml.ledger.api.v1.event.InterfaceView): ObservedInterfaceView = {
    val interfaceId =
      interfaceView.interfaceId.getOrElse(sys.error(s"Expected interfaceId in $interfaceView"))
    ObservedInterfaceView(interfaceId.entityName, interfaceView.serializedSize)
  }
}

View File

@ -1,54 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.util.ObserverWithResult
import com.daml.ledger.api.v1.transaction_service.GetTransactionTreesResponse
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.Future
object TreeEventsObserver {

  /** Creates an observer restricted to the given set of expected template names. */
  def apply(expectedTemplateNames: Set[String]): TreeEventsObserver = {
    val log = LoggerFactory.getLogger(getClass)
    new TreeEventsObserver(expectedTemplateNames = expectedTemplateNames, logger = log)
  }
}
/** Accumulates create and exercise events found in transaction tree streams. */
class TreeEventsObserver(expectedTemplateNames: Set[String], logger: Logger)
    extends ObserverWithResult[GetTransactionTreesResponse, ObservedEvents](logger) {

  // Mutable accumulators, appended to from the stream callback.
  private val createEvents = collection.mutable.ArrayBuffer[ObservedCreateEvent]()
  private val exerciseEvents = collection.mutable.ArrayBuffer[ObservedExerciseEvent]()

  override def streamName: String = "dummy-stream-name"

  override def onNext(value: GetTransactionTreesResponse): Unit =
    value.transactions.foreach { transaction =>
      transaction.eventsById.values.foreach { event =>
        event.kind.created.foreach { created =>
          createEvents.addOne(ObservedCreateEvent(created, offset = transaction.offset))
        }
        event.kind.exercised.foreach { exercised =>
          exerciseEvents.addOne(ObservedExerciseEvent(exercised, offset = transaction.offset))
        }
      }
    }

  override def completeWith(): Future[ObservedEvents] =
    Future.successful(
      ObservedEvents(
        expectedTemplateNames = expectedTemplateNames,
        createEvents = createEvents.toList,
        exerciseEvents = exerciseEvents.toList,
      )
    )
}

View File

@ -1,139 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig.{
PartyFilter,
PartyNamePrefixFilter,
TransactionsStreamConfig,
}
import com.daml.ledger.api.benchtool.submission.{
AllocatedParties,
AllocatedPartySet,
BenchtoolTestsPackageInfo,
}
import com.daml.ledger.client.binding.Primitive
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scalaz.syntax.tag._
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
/** Checks that ConfigEnricher expands party-name-prefix filters into concrete
  * per-party filters and rewrites known template names to fully-qualified ids.
  */
class ConfigEnricherSpec extends AnyFlatSpec with Matchers {
it should "expand party-set filter into a sequence of party filters" in {
// NOTE(review): the interpolation is a no-op; makePartyName just echoes its input.
def makePartyName(shortName: String): String = s"$shortName"
def makeParty(shortName: String): Primitive.Party = Primitive.Party(makePartyName(shortName))
val desugaring = new ConfigEnricher(
allocatedParties = AllocatedParties(
signatoryO = Some(makeParty("Sig-0")),
observers = List(makeParty("Obs-0")),
divulgees = List(makeParty("Div-0")),
extraSubmitters = List(makeParty("Sub-0")),
observerPartySets = List(
AllocatedPartySet(
mainPartyNamePrefix = "MyParty",
List("MyParty-0", "MyParty-1", "MyParty-11", "MyParty-12", "MyParty-21", "MyParty-22")
.map(makeParty),
)
),
),
BenchtoolTestsPackageInfo.StaticDefault,
)
val templates: List[String] = List("otherTemplate", "Foo1")
val foo1Id = com.daml.ledger.test.benchtool.Foo.Foo1.id.unwrap
// Known template names are rewritten to "<package>:<module>:<entity>";
// unknown names ("otherTemplate") pass through unchanged.
val enrichedTemplates: List[String] =
List("otherTemplate", s"${foo1Id.packageId}:${foo1Id.moduleName}:${foo1Id.entityName}")
desugaring.enrichStreamConfig(
TransactionsStreamConfig(
name = "flat",
filters = List(
PartyFilter(
party = "Obs-0",
templates = templates,
interfaces = List.empty,
),
PartyFilter(
party = "Sig-0",
templates = templates,
interfaces = List.empty,
),
PartyFilter(
party = "UnknownParty-0",
templates = templates,
interfaces = List.empty,
),
),
// Prefix filters: "MyParty-1" matches MyParty-1/-11/-12, "MyParty-2" matches
// MyParty-21/-22, and "Obs" matches the single observer Obs-0.
partyNamePrefixFilters = List(
PartyNamePrefixFilter(
partyNamePrefix = "MyParty-1",
templates = templates,
),
PartyNamePrefixFilter(
partyNamePrefix = "MyParty-2",
templates = templates,
),
PartyNamePrefixFilter(
partyNamePrefix = "Obs",
templates = templates,
),
),
subscriptionDelay = Some(Duration(1337, TimeUnit.SECONDS)),
)
) shouldBe TransactionsStreamConfig(
name = "flat",
// Expanded result: the original three filters plus one filter per matched party.
filters = List(
PartyFilter(
party = "Obs-0",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "Sig-0",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "UnknownParty-0",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "MyParty-1",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "MyParty-11",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "MyParty-12",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "MyParty-21",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "MyParty-22",
templates = enrichedTemplates,
interfaces = List.empty,
),
PartyFilter(
party = "Obs-0",
templates = enrichedTemplates,
interfaces = List.empty,
),
),
subscriptionDelay = Some(Duration(1337, TimeUnit.SECONDS)),
)
}
}

View File

@ -1,348 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.config
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.wordspec.AnyWordSpec
import scala.concurrent.duration._
import java.io.File
import java.util.concurrent.TimeUnit
/** Unit tests for the benchtool CLI parser (`Cli.config`).
  *
  * Each test builds the expected [[Config]] by copying [[Config.Default]] and
  * asserts that the given argument list parses to exactly that value.
  */
class CliSpec extends AnyWordSpec with Matchers with OptionValues with TableDrivenPropertyChecks {
"Cli" should {
"produce the default config when no arguments defined" in {
parse() shouldBe Config.Default
}
"parse ledger API endpoint" in {
val endpoint = "foo:123"
val expectedConfig = Config.Default.copy(
ledger = Config.Ledger(
hostname = "foo",
port = 123,
)
)
// Long and short option forms are equivalent.
parse("--endpoint", endpoint) shouldBe expectedConfig
parse("-e", endpoint) shouldBe expectedConfig
}
"parse workflow config location" in {
val workflowFile = "/some/path/to/file"
val expectedConfig = Config.Default.copy(workflowConfigFile = Some(new File(workflowFile)))
parse("--workflow-config", workflowFile) shouldBe expectedConfig
parse("-w", workflowFile) shouldBe expectedConfig
}
"parse maximum number of in-flight commands parameter" in {
val maxCommands = 123
val expectedConfig = Config.Default.copy(maxInFlightCommands = maxCommands)
parse("--max-in-flight-commands", maxCommands.toString) shouldBe expectedConfig
}
"parse submission batch size" in {
val batchSize = 1234
val expectedConfig = Config.Default.copy(submissionBatchSize = batchSize)
parse("--submission-batch-size", batchSize.toString) shouldBe expectedConfig
}
"parse log interval" in {
// Accepts scala.concurrent.duration-style suffixes (s, millis, m, ...).
val cases = Table(
"cli value" -> "duration",
"1s" -> 1.second,
"123millis" -> 123.millis,
"5m" -> 5.minutes,
)
forAll(cases) { (argument, intervalDuration) =>
val expectedConfig = Config.Default.copy(reportingPeriod = intervalDuration)
parse("--log-interval", argument) shouldBe expectedConfig
parse("-r", argument) shouldBe expectedConfig
}
}
"parse thread pool executor's core pool size" in {
val size = 123
val expectedConfig =
Config.Default.copy(concurrency = Config.Default.concurrency.copy(corePoolSize = size))
parse("--core-pool-size", size.toString) shouldBe expectedConfig
}
"parse thread pool executor's max pool size" in {
val size = 123
val expectedConfig =
Config.Default.copy(concurrency = Config.Default.concurrency.copy(maxPoolSize = size))
parse("--max-pool-size", size.toString) shouldBe expectedConfig
}
"parse stream type" in {
import WorkflowConfig.StreamConfig._
val name = "streamname"
val party1 = "dummy1"
val party2 = "dummy2"
val appId = "appid"
// One case per supported stream type; key=value pairs are comma-separated.
val cases = Table(
"cli argument" -> "stream config",
s"stream-type=transactions,name=$name,filters=$party1" -> TransactionsStreamConfig(
name = name,
filters = List(PartyFilter(party1, Nil, Nil)),
beginOffset = None,
endOffset = None,
objectives = None,
maxItemCount = None,
timeoutO = None,
),
s"stream-type=transaction-trees,name=$name,filters=$party1" -> TransactionTreesStreamConfig(
name = name,
filters = List(PartyFilter(party1, Nil, Nil)),
beginOffset = None,
endOffset = None,
objectives = None,
maxItemCount = None,
timeoutO = None,
),
s"stream-type=active-contracts,name=$name,filters=$party1" -> ActiveContractsStreamConfig(
name = name,
filters = List(PartyFilter(party1, Nil, Nil)),
objectives = None,
maxItemCount = None,
timeoutO = None,
),
s"stream-type=completions,name=$name,parties=$party1+$party2,application-id=$appId,timeout=123s,max-item-count=5" -> CompletionsStreamConfig(
name = name,
parties = List(party1, party2),
applicationId = appId,
beginOffset = None,
objectives = None,
timeoutO = Some(Duration(123, TimeUnit.SECONDS)),
maxItemCount = Some(5),
),
)
forAll(cases) { (argument, config) =>
val expectedConfig =
Config.Default.copy(workflow = Config.Default.workflow.copy(streams = List(config)))
parse("--consume-stream", argument) shouldBe expectedConfig
parse("-s", argument) shouldBe expectedConfig
}
}
"parse stream filters" in {
import WorkflowConfig.StreamConfig._
val name = "streamname"
val party1 = "alice"
val party2 = "bob"
val party3 = "david"
val template1 = "packageid:Foo:Foo1"
val template2 = "packageid2:Foo:Foo2"
// each party filter separated by '+' and each template in a filter separated by '@'
val filters = s"$party1+$party2@$template1@$template2+$party3@$template2"
val filtersList = List(
PartyFilter(party1, List(), List()),
PartyFilter(party2, List(template1, template2), List()),
PartyFilter(party3, List(template2), List()),
)
val cases = Table(
"cli argument" -> "stream config",
s"stream-type=transactions,name=$name,filters=$filters" -> TransactionsStreamConfig(
name = name,
filters = filtersList,
beginOffset = None,
endOffset = None,
objectives = None,
maxItemCount = None,
timeoutO = None,
),
s"stream-type=transaction-trees,name=$name,filters=$filters" -> TransactionTreesStreamConfig(
name = name,
filters = filtersList,
beginOffset = None,
endOffset = None,
objectives = None,
maxItemCount = None,
timeoutO = None,
),
s"stream-type=active-contracts,name=$name,filters=$filters" -> ActiveContractsStreamConfig(
name = name,
filters = filtersList,
objectives = None,
maxItemCount = None,
timeoutO = None,
),
)
forAll(cases) { (argument, config) =>
val expectedConfig =
Config.Default.copy(workflow = Config.Default.workflow.copy(streams = List(config)))
parse("--consume-stream", argument) shouldBe expectedConfig
parse("-s", argument) shouldBe expectedConfig
}
}
"parse begin offset" in {
import WorkflowConfig.StreamConfig._
val name = "streamname"
val party = "dummy"
// Offsets: absolute string, or the special ledger-begin/ledger-end boundaries.
val cases = Table(
"cli parameter" -> "offset",
"abcdef" -> LedgerOffset.defaultInstance.withAbsolute("abcdef"),
"ledger-begin" -> LedgerOffset.defaultInstance.withBoundary(
LedgerOffset.LedgerBoundary.LEDGER_BEGIN
),
"ledger-end" -> LedgerOffset.defaultInstance.withBoundary(
LedgerOffset.LedgerBoundary.LEDGER_END
),
)
forAll(cases) { (argument, offset) =>
val streamConfig = TransactionsStreamConfig(
name = name,
filters = List(PartyFilter(party, Nil, Nil)),
beginOffset = Some(offset),
endOffset = None,
objectives = None,
maxItemCount = None,
timeoutO = None,
)
val expectedConfig =
Config.Default.copy(workflow = Config.Default.workflow.copy(streams = List(streamConfig)))
parse(
"--consume-stream",
s"stream-type=transactions,name=$name,filters=$party,begin-offset=$argument",
) shouldBe expectedConfig
}
}
"parse end offset" in {
import WorkflowConfig.StreamConfig._
val name = "streamname"
val party = "dummy"
val cases = Table(
"cli parameter" -> "offset",
"abcdef" -> LedgerOffset.defaultInstance.withAbsolute("abcdef"),
"ledger-begin" -> LedgerOffset.defaultInstance.withBoundary(
LedgerOffset.LedgerBoundary.LEDGER_BEGIN
),
"ledger-end" -> LedgerOffset.defaultInstance.withBoundary(
LedgerOffset.LedgerBoundary.LEDGER_END
),
)
forAll(cases) { (argument, offset) =>
val streamConfig = TransactionsStreamConfig(
name = name,
filters = List(PartyFilter(party, Nil, Nil)),
beginOffset = None,
endOffset = Some(offset),
objectives = None,
maxItemCount = None,
timeoutO = None,
)
val expectedConfig =
Config.Default.copy(workflow = Config.Default.workflow.copy(streams = List(streamConfig)))
parse(
"--consume-stream",
s"stream-type=transactions,name=$name,filters=$party,end-offset=$argument",
) shouldBe expectedConfig
}
}
"parse transaction objectives" in {
import WorkflowConfig.StreamConfig._
val name = "streamname"
val party = "dummy"
// Each objective key is parsed independently into its own field.
val cases = Table(
"cli parameter" -> "objectives",
"max-delay=5" -> TransactionObjectives(maxDelaySeconds = Some(5), None, None, None),
"min-consumption-speed=1.23" -> TransactionObjectives(
None,
minConsumptionSpeed = Some(1.23),
None,
None,
),
"min-item-rate=1234.5" -> TransactionObjectives(
None,
None,
minItemRate = Some(1234.5),
None,
),
"max-item-rate=1234.5" -> TransactionObjectives(
None,
None,
None,
maxItemRate = Some(1234.5),
),
)
forAll(cases) { (argument, objectives) =>
val streamConfig = TransactionsStreamConfig(
name = name,
filters = List(PartyFilter(party, Nil, Nil)),
beginOffset = None,
endOffset = None,
objectives = Some(objectives),
maxItemCount = None,
timeoutO = None,
)
val expectedConfig =
Config.Default.copy(workflow = Config.Default.workflow.copy(streams = List(streamConfig)))
parse(
"--consume-stream",
s"stream-type=transactions,name=$name,filters=$party,$argument",
) shouldBe expectedConfig
}
}
"parse rate objectives" in {
import WorkflowConfig.StreamConfig._
val name = "streamname"
val party = "dummy"
val cases = Table(
"cli parameter" -> "objectives",
"min-item-rate=1234.5" -> AcsAndCompletionsObjectives(minItemRate = Some(1234.5), None),
"max-item-rate=1234.5" -> AcsAndCompletionsObjectives(None, maxItemRate = Some(1234.5)),
)
forAll(cases) { (argument, objectives) =>
val streamConfig = ActiveContractsStreamConfig(
name = name,
filters = List(PartyFilter(party, Nil, Nil)),
objectives = Some(objectives),
maxItemCount = None,
timeoutO = None,
)
val expectedConfig =
Config.Default.copy(workflow = Config.Default.workflow.copy(streams = List(streamConfig)))
parse(
"--consume-stream",
s"stream-type=active-contracts,name=$name,filters=$party,$argument",
) shouldBe expectedConfig
}
}
"parse `latency-test` flag" in {
val expectedConfig = Config.Default.copy(latencyTest = true)
parse("--latency-test") shouldBe expectedConfig
}
"parse `max-latency-objective` flag" in {
val expectedConfig = Config.Default.copy(maxLatencyObjectiveMillis = 6000L)
parse("--max-latency-millis", "6000") shouldBe expectedConfig
}
"`latency-test` cannot be enabled with configured workflow streams" in {
// Invalid combination: parsing yields None rather than a Config.
Cli.config(
Array(
"--latency-test",
"--consume-stream",
s"stream-type=transactions,name=some-name,filters=some-filter,end-offset=ABC",
)
) shouldBe empty
}
}
// Parses the arguments and unwraps the Option; fails the test if parsing failed.
private def parse(args: String*): Config =
Cli.config(args.toArray).value
}

View File

@ -1,690 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.config
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.io.StringReader
import com.daml.ledger.api.benchtool.config.WorkflowConfig.{FooSubmissionConfig, PruningConfig}
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FooSubmissionConfig.PartySet
import com.daml.ledger.api.benchtool.config.WorkflowConfig.StreamConfig.PartyNamePrefixFilter
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
class WorkflowConfigParserSpec extends AnyWordSpec with Matchers {
private val ledgerBeginOffset =
LedgerOffset.defaultInstance.withBoundary(LedgerOffset.LedgerBoundary.LEDGER_BEGIN)
private val ledgerEndOffset =
LedgerOffset.defaultInstance.withBoundary(LedgerOffset.LedgerBoundary.LEDGER_END)
"WorkflowConfigParser" should {
"parse complete workflow configuration" in {
val yaml =
"""submission:
| type: foo
| num_instances: 500
| num_observers: 4
| num_divulgees: 5
| num_extra_submitters: 6
| unique_parties: true
| allow_non_transient_contracts: true
| instance_distribution:
| - template: Foo1
| weight: 50
| payload_size_bytes: 60
| nonconsuming_exercises:
| probability: 4.9
| payload_size_bytes: 100
| consuming_exercises:
| probability: 0.5
| payload_size_bytes: 200
| application_ids:
| - id: App-1
| weight: 100
| - id: App-2
| weight: 102
| observers_party_sets:
| - party_name_prefix: FooParty
| count: 99
| visibility: 0.35
| - party_name_prefix: BazParty
| count: 10
| visibility: 0.01
|streams:
| - type: active-contracts
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| subscription_delay: 7min
| objectives:
| min_item_rate: 123
| max_item_rate: 456
| max_item_count: 700
| - type: transactions
| name: stream-2
| filters:
| - party: Obs-2
| templates:
| - Foo1
|unary:
| - type: pruning
| name: pruning-123
| prune_all_divulged_contracts: false
| max_duration_objective: 56 ms
|""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = Some(
WorkflowConfig.FooSubmissionConfig(
allowNonTransientContracts = true,
numberOfInstances = 500,
numberOfObservers = 4,
numberOfDivulgees = 5,
numberOfExtraSubmitters = 6,
uniqueParties = true,
instanceDistribution = List(
WorkflowConfig.FooSubmissionConfig.ContractDescription(
template = "Foo1",
weight = 50,
payloadSizeBytes = 60,
)
),
nonConsumingExercises = Some(
WorkflowConfig.FooSubmissionConfig.NonconsumingExercises(
probability = 4.9,
payloadSizeBytes = 100,
)
),
consumingExercises = Some(
WorkflowConfig.FooSubmissionConfig.ConsumingExercises(
probability = 0.5,
payloadSizeBytes = 200,
)
),
applicationIds = List(
FooSubmissionConfig.ApplicationId(
applicationId = "App-1",
weight = 100,
),
FooSubmissionConfig.ApplicationId(
applicationId = "App-2",
weight = 102,
),
),
observerPartySets = List(
PartySet(partyNamePrefix = "FooParty", count = 99, visibility = 0.35),
PartySet(partyNamePrefix = "BazParty", count = 10, visibility = 0.01),
),
)
),
streams = List(
WorkflowConfig.StreamConfig.ActiveContractsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
)
),
objectives = Some(
WorkflowConfig.StreamConfig.AcsAndCompletionsObjectives(
minItemRate = Some(123),
maxItemRate = Some(456),
)
),
maxItemCount = Some(700),
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
),
// Configuration with all optional values missing
WorkflowConfig.StreamConfig.TransactionsStreamConfig(
name = "stream-2",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1"),
)
),
),
),
pruning = Some(
PruningConfig(
name = "pruning-123",
pruneAllDivulgedContracts = false,
maxDurationObjective = Duration(56, TimeUnit.MILLISECONDS),
)
),
)
)
}
"parse foo submission configuration" in {
val yaml =
"""submission:
| type: foo
| num_instances: 500
| num_divulgees: 1
| num_observers: 4
| num_divulgees: 5
| num_extra_submitters: 6
| unique_parties: true
| instance_distribution:
| - template: Foo1
| weight: 50
| payload_size_bytes: 60
| - template: Foo2
| weight: 25
| payload_size_bytes: 35
| - template: Foo3
| weight: 10
| payload_size_bytes: 25
""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = Some(
WorkflowConfig.FooSubmissionConfig(
numberOfInstances = 500,
numberOfObservers = 4,
numberOfDivulgees = 5,
numberOfExtraSubmitters = 6,
uniqueParties = true,
instanceDistribution = List(
WorkflowConfig.FooSubmissionConfig.ContractDescription(
template = "Foo1",
weight = 50,
payloadSizeBytes = 60,
),
WorkflowConfig.FooSubmissionConfig.ContractDescription(
template = "Foo2",
weight = 25,
payloadSizeBytes = 35,
),
WorkflowConfig.FooSubmissionConfig.ContractDescription(
template = "Foo3",
weight = 10,
payloadSizeBytes = 25,
),
),
nonConsumingExercises = None,
consumingExercises = None,
applicationIds = List.empty,
)
),
streams = Nil,
)
)
}
"parse fibonacci submission configuration" in {
val yaml =
"""submission:
| type: fibonacci
| num_instances: 500
| unique_parties: true
| value: 7
| wait_for_submission: true
""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = Some(
WorkflowConfig.FibonacciSubmissionConfig(
numberOfInstances = 500,
uniqueParties = true,
value = 7,
waitForSubmission = true,
)
),
streams = Nil,
)
)
}
// Transactions stream with the full option set: party filters, prefix filters,
// absolute begin/end offsets, a subscription delay, and every objective key
// (including max_stream_duration parsed as a Duration).
// NOTE(review): removed a stale commented-out "was not equal to" failure dump
// left over from a past test run — it no longer reflected the assertion below.
"parse transactions stream configuration" in {
val yaml =
"""streams:
| - type: transactions
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| party_prefix_filters:
| - party_name_prefix: MyParty
| templates: [Foo1, Foo2]
| - party_name_prefix: MyOtherParty
| templates: [Foo1]
| begin_offset: foo
| end_offset: bar
| subscription_delay: 7min
| objectives:
| max_delay_seconds: 123
| min_consumption_speed: 2.34
| min_item_rate: 12
| max_item_rate: 34
| max_stream_duration: 56 ms
|""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.TransactionsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
)
),
partyNamePrefixFilters = List(
PartyNamePrefixFilter(
partyNamePrefix = "MyParty",
templates = List("Foo1", "Foo2"),
),
PartyNamePrefixFilter(
partyNamePrefix = "MyOtherParty",
templates = List("Foo1"),
),
),
beginOffset = Some(offset("foo")),
endOffset = Some(offset("bar")),
objectives = Some(
WorkflowConfig.StreamConfig.TransactionObjectives(
maxDelaySeconds = Some(123),
minConsumptionSpeed = Some(2.34),
minItemRate = Some(12),
maxItemRate = Some(34),
maxTotalStreamRuntimeDuration = Some(Duration(56, TimeUnit.MILLISECONDS)),
)
),
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// Partial objectives section: keys that are absent from the YAML
// (max_delay_seconds, max_item_rate) must decode to None rather than fail.
"parse stream configuration with some objectives set" in {
val yaml =
"""streams:
| - type: transactions
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| begin_offset: foo
| end_offset: bar
| subscription_delay: 7min
| objectives:
| min_consumption_speed: 2.34
| min_item_rate: 12""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.TransactionsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
)
),
beginOffset = Some(offset("foo")),
endOffset = Some(offset("bar")),
objectives = Some(
WorkflowConfig.StreamConfig.TransactionObjectives(
maxDelaySeconds = None,
minConsumptionSpeed = Some(2.34),
minItemRate = Some(12),
maxItemRate = None,
)
),
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// A stream with no objectives section at all: the whole objectives field
// must decode to None.
"parse stream configuration without objectives" in {
val yaml =
"""streams:
| - type: transactions
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| subscription_delay: 7min
| begin_offset: foo
| end_offset: bar""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.TransactionsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
)
),
beginOffset = Some(offset("foo")),
endOffset = Some(offset("bar")),
objectives = None,
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// transaction-trees stream: multiple party filters, where a filter without a
// templates key (Obs-3) must decode to an empty template list.
"parse transaction-trees stream configuration" in {
val yaml =
"""streams:
| - type: transaction-trees
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| - party: Obs-3
| begin_offset: foo
| end_offset: bar
| subscription_delay: 7min
| objectives:
| max_delay_seconds: 123
| min_consumption_speed: 2.34
| min_item_rate: 12
| max_item_rate: 34""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.TransactionTreesStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
),
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-3",
templates = List.empty,
),
),
beginOffset = Some(offset("foo")),
endOffset = Some(offset("bar")),
objectives = Some(
WorkflowConfig.StreamConfig.TransactionObjectives(
maxDelaySeconds = Some(123),
minConsumptionSpeed = Some(2.34),
minItemRate = Some(12),
maxItemRate = Some(34),
)
),
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// active-contracts stream: uses the rate-only AcsAndCompletionsObjectives
// (no delay/consumption-speed keys exist for this stream type).
"parse active contracts stream configuration" in {
val yaml =
"""streams:
| - type: active-contracts
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| subscription_delay: 7min
| objectives:
| min_item_rate: 123
| max_item_rate: 4567""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.ActiveContractsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
)
),
objectives = Some(
WorkflowConfig.StreamConfig.AcsAndCompletionsObjectives(
minItemRate = Some(123),
maxItemRate = Some(4567),
)
),
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// completions stream: parties list, application_id, and the "100s" timeout /
// max_item_count scalars decoded into Duration and Long respectively.
"parse completions stream configuration" in {
val yaml =
"""streams:
| - type: completions
| name: stream-1
| parties: [Obs-2]
| begin_offset: foo
| application_id: foobar
| timeout: 100s
| max_item_count: 101
| subscription_delay: 7min
| objectives:
| min_item_rate: 12
| max_item_rate: 345""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.CompletionsStreamConfig(
name = "stream-1",
parties = List("Obs-2"),
beginOffset = Some(offset("foo")),
applicationId = "foobar",
objectives = Some(
WorkflowConfig.StreamConfig.AcsAndCompletionsObjectives(
minItemRate = Some(12),
maxItemRate = Some(345),
)
),
timeoutO = Some(Duration(100, TimeUnit.SECONDS)),
maxItemCount = Some(101L),
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// The special offset strings "ledger-begin" / "ledger-end" must decode to the
// dedicated boundary offsets rather than absolute offsets.
"parse ledger-begin and ledger-end markers" in {
val yaml =
"""streams:
| - type: transactions
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| subscription_delay: 7min
| begin_offset: ledger-begin
| end_offset: ledger-end""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.TransactionsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
)
),
beginOffset = Some(ledgerBeginOffset),
endOffset = Some(ledgerEndOffset),
objectives = None,
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
}
// Party filter declared via "interfaces" instead of "templates": the
// interfaces list is populated and templates is left at its default.
"parse stream configuration with interface filters" in {
val yaml =
"""streams:
| - type: transactions
| name: stream-1
| filters:
| - party: Obs-2
| interfaces:
| - FooInterface
| begin_offset: foo
| end_offset: bar
| subscription_delay: 7min
| objectives:
| min_consumption_speed: 2.34
| min_item_rate: 12""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.TransactionsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
interfaces = List("FooInterface"),
)
),
beginOffset = Some(offset("foo")),
endOffset = Some(offset("bar")),
objectives = Some(
WorkflowConfig.StreamConfig.TransactionObjectives(
maxDelaySeconds = None,
minConsumptionSpeed = Some(2.34),
minItemRate = Some(12),
maxItemRate = None,
)
),
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// party_prefix_filters entry declared with "interfaces": the prefix filter's
// interfaces list is populated (mirrors the templates variant tested above).
"parse party_prefix_filters interfaces" in {
val yaml =
"""streams:
| - type: transactions
| name: stream-1
| filters:
| - party: Obs-2
| templates:
| - Foo1
| - Foo3
| party_prefix_filters:
| - party_name_prefix: My-Party
| interfaces: [FooInterface]
| begin_offset: foo
| end_offset: bar
| subscription_delay: 7min
| objectives:
| max_delay_seconds: 123
| min_consumption_speed: 2.34
| min_item_rate: 12
| max_item_rate: 34
| max_stream_duration: 56 ms
|""".stripMargin
parseYaml(yaml) shouldBe Right(
WorkflowConfig(
submission = None,
streams = List(
WorkflowConfig.StreamConfig.TransactionsStreamConfig(
name = "stream-1",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = "Obs-2",
templates = List("Foo1", "Foo3"),
)
),
partyNamePrefixFilters = List(
PartyNamePrefixFilter(
partyNamePrefix = "My-Party",
interfaces = List("FooInterface"),
)
),
beginOffset = Some(offset("foo")),
endOffset = Some(offset("bar")),
objectives = Some(
WorkflowConfig.StreamConfig.TransactionObjectives(
maxDelaySeconds = Some(123),
minConsumptionSpeed = Some(2.34),
minItemRate = Some(12),
maxItemRate = Some(34),
maxTotalStreamRuntimeDuration = Some(Duration(56, TimeUnit.MILLISECONDS)),
)
),
maxItemCount = None,
timeoutO = None,
subscriptionDelay = Some(Duration(7, TimeUnit.MINUTES)),
)
),
)
)
}
// Runs the production parser over an in-memory YAML string.
def parseYaml(yaml: String): Either[WorkflowConfigParser.ParserError, WorkflowConfig] =
WorkflowConfigParser.parse(new StringReader(yaml))
// Builds an absolute LedgerOffset from the given offset string.
def offset(str: String): LedgerOffset = LedgerOffset.defaultInstance.withAbsolute(str)
}

View File

@ -1,269 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.metrics.ConsumptionSpeedMetric._
import com.google.protobuf.timestamp.Timestamp
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.time.{Clock, Duration, Instant}
import scala.language.existentials
// Tests for ConsumptionSpeedMetric. Per the assertions below, the periodic
// speed is derived from the span between the first and last record time seen
// in the period, divided by the period length; the final value is always empty.
class ConsumptionSpeedMetricSpec extends AnyWordSpec with Matchers {
ConsumptionSpeedMetric.getClass.getSimpleName should {
// No elements observed: periodic speed is 0.0, final value is None.
"correctly handle initial state" in {
val metric = ConsumptionSpeedMetric.empty[String](_ => List.empty)
val (_, periodicValue) = metric.periodicValue(Duration.ofMillis(100))
val finalValue = metric.finalValue(Duration.ofSeconds(1))
periodicValue shouldBe Value(Some(0.0))
finalValue shouldBe Value(None)
}
// Speed over one period = (last - first record time in seconds) * 1000 / period millis.
"compute values after processing elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(3)
val elem1: String = "a"
val elem2: String = "d"
val testNow = Clock.systemUTC().instant()
val recordTimes1 = List(
testNow.minusSeconds(100),
testNow.minusSeconds(50),
)
val recordTimes2 = List(
testNow.minusSeconds(20)
)
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> recordTimes1,
elem2 -> recordTimes2,
)
)
val metric = ConsumptionSpeedMetric.empty[String](testRecordTimeFunction)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
val firstElementOfThePeriod = recordTimes1.head
val lastElementOfThePeriod = recordTimes2.last
val expectedSpeed =
(lastElementOfThePeriod.getEpochSecond - firstElementOfThePeriod.getEpochSecond) * 1000.0 / periodDuration.toMillis
periodicValue shouldBe Value(Some(expectedSpeed))
finalValue shouldBe Value(None)
}
// A single record time in the very first period yields a zero speed.
"correctly handle initial periods with a single record time" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(3)
val elem1: String = "a"
val testNow = Clock.systemUTC().instant()
val recordTimes1 = List(testNow.minusSeconds(11))
// The assumption made here is that each consecutive element has higher record times
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> recordTimes1
)
)
val metric = ConsumptionSpeedMetric.empty[String](testRecordTimeFunction)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
periodicValue shouldBe Value(Some(0.0))
finalValue shouldBe Value(None)
}
// A later period with one record time measures against the previous period's
// last record time: (50s - 20s gap = 30s) * 1000 / 100ms = 300.0.
"correctly handle non-initial periods with a single record time" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(3)
val elem1: String = "a"
val elem2: String = "b"
val testNow = Clock.systemUTC().instant()
val recordTimes1 = List(
testNow.minusSeconds(100),
testNow.minusSeconds(50),
)
val recordTimes2 = List(
testNow.minusSeconds(20)
)
// The assumption made here is that each consecutive element has higher record times
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> recordTimes1,
elem2 -> recordTimes2,
)
)
val metric = ConsumptionSpeedMetric.empty[String](testRecordTimeFunction)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.periodicValue(periodDuration)
._1
.onNext(elem2)
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
periodicValue shouldBe Value(Some(300.0))
finalValue shouldBe Value(None)
}
// A period in which no elements arrive reports a zero speed.
"correctly handle periods with no elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(3)
val elem1: String = "a"
val elem2: String = "b"
val testNow = Clock.systemUTC().instant()
val recordTimes1 = List(
testNow.minusSeconds(100)
)
val recordTimes2 = List(
testNow.minusSeconds(90)
)
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> recordTimes1,
elem2 -> recordTimes2,
)
)
val metric = ConsumptionSpeedMetric.empty[String](testRecordTimeFunction)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
._1
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
periodicValue shouldBe Value(Some(0.0))
finalValue shouldBe Value(None)
}
// Across several periods, the reported speed for the last period spans from
// the previous period's last record time to the last period's last record time.
"correctly handle multiple periods with elements" in {
val period1Duration: Duration = Duration.ofMillis(100)
val period2Duration: Duration = Duration.ofMillis(120)
val period3Duration: Duration = Duration.ofMillis(110)
val totalDuration: Duration = Duration.ofSeconds(3)
val elem1: String = "a"
val elem2: String = "b"
val elem3: String = "c"
val testNow = Clock.systemUTC().instant()
val recordTimes1 = List(
testNow.minusSeconds(100),
testNow.minusSeconds(90),
)
// The assumption made here is that each consecutive element has higher record times
val recordTimes2 = List(
testNow.minusSeconds(70),
testNow.minusSeconds(40),
)
val recordTimes3 = List(
testNow.minusSeconds(20),
testNow.minusSeconds(15),
)
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> recordTimes1,
elem2 -> recordTimes2,
elem3 -> recordTimes3,
)
)
val metric = ConsumptionSpeedMetric.empty[String](testRecordTimeFunction)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(period1Duration)
._1
.periodicValue(period2Duration)
._1
.onNext(elem3)
.periodicValue(period3Duration)
val finalValue = newMetric.finalValue(totalDuration)
val first = recordTimes2.last
val last = recordTimes3.last
val expectedSpeed =
(last.getEpochSecond - first.getEpochSecond) * 1000.0 / period3Duration.toMillis
periodicValue shouldBe Value(Some(expectedSpeed))
finalValue shouldBe Value(None)
}
// When several periods violate the minimum-speed objective, the metric must
// report the objective with the single worst (lowest) observed speed.
"compute violated min speed SLO and the minimum speed" in {
val periodDuration: Duration = Duration.ofMillis(100)
val testNow = Clock.systemUTC().instant()
val minAllowedSpeed = 2.0
val elem1 = "a"
val elem2 = "b"
val elem3 = "c"
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> List(
testNow.minusMillis(5000),
testNow.minusMillis(4500),
testNow.minusMillis(4000),
), // ok, speed = 10.0
elem2 -> List(
testNow.minusMillis(3990),
testNow.minusMillis(3980),
testNow.minusMillis(3920), // not ok, speed 0.8
),
elem3 -> List(
testNow.minusMillis(3900),
testNow.minusMillis(3800),
testNow.minusMillis(3770), // not ok, speed 1.5
),
)
)
val objective = MinConsumptionSpeed(minAllowedSpeed)
val metric: ConsumptionSpeedMetric[String] =
ConsumptionSpeedMetric.empty[String](
recordTimeFunction = testRecordTimeFunction,
objective = Some(objective),
)
val violatedObjectives =
metric
.onNext(elem1)
.periodicValue(periodDuration)
._1
.onNext(elem2)
.periodicValue(periodDuration)
._1
.onNext(elem3)
.periodicValue(periodDuration)
._1
.violatedPeriodicObjectives
violatedObjectives shouldBe List(
objective -> Value(Some(0.8))
)
}
}
// Looks up each element's record times in the given map, converting Instants
// to protobuf Timestamps; unknown elements are a test bug and throw.
private def recordTimeFunctionFromMap(
map: Map[String, List[Instant]]
)(str: String): List[Timestamp] =
map
.map { case (k, v) => k -> v.map(instantToTimestamp) }
.getOrElse(str, throw new RuntimeException(s"Unexpected record function argument: $str"))
// Converts a java.time.Instant to a protobuf Timestamp (seconds + nanos).
private def instantToTimestamp(instant: Instant): Timestamp =
Timestamp.of(instant.getEpochSecond, instant.getNano)
}

View File

@ -1,286 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.metrics.CountRateMetric._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.time.Duration
import scala.language.existentials
// Tests for CountRateMetric. Counting is done per string character
// (countingFunction = stringLength); periodic rate = count * 1000 / period
// millis, final rate = total count / total seconds.
// Fix: corrected the typo "corresponing" -> "corresponding" in a test name.
class CountRateMetricSpec extends AnyWordSpec with Matchers {
"CountRateMetric" should {
// No elements observed: both periodic and final rates are 0.0.
"correctly handle initial state" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(1)
val metric: CountRateMetric[String] = anEmptyStringMetric()
val (_, periodicValue) = metric.periodicValue(periodDuration)
val finalValue = metric.finalValue(totalDuration)
periodicValue shouldBe Value(0.0)
finalValue shouldBe Value(0.0)
}
// Periodic rate scales the period count to per-second; final rate averages
// the total count over the whole run.
"compute values after processing elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(5)
val metric: CountRateMetric[String] = anEmptyStringMetric()
val elem1: String = "abc"
val elem2: String = "defg"
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
val totalCount: Int = stringLength(elem1) + stringLength(elem2)
periodicValue shouldBe Value(
ratePerSecond = totalCount * 1000.0 / periodDuration.toMillis
)
finalValue shouldBe Value(
ratePerSecond = totalCount / totalDuration.getSeconds.toDouble
)
}
// An empty period reports a zero periodic rate; the final rate still covers
// everything seen so far.
"correctly handle periods with no elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(5)
val metric: CountRateMetric[String] = anEmptyStringMetric()
val elem1: String = "abc"
val elem2: String = "defg"
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
._1
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
val totalCount: Int = stringLength(elem1) + stringLength(elem2)
periodicValue shouldBe Value(
ratePerSecond = 0.0
)
finalValue shouldBe Value(
ratePerSecond = totalCount / totalDuration.getSeconds.toDouble
)
}
// The periodic rate only reflects elements of the current period, while the
// final rate reflects all elements.
"correctly handle multiple periods with elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(5)
val metric: CountRateMetric[String] = anEmptyStringMetric()
val elem1: String = "abc"
val elem2: String = "defg"
val elem3: String = "hij"
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
._1
.onNext(elem3)
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
val totalCount: Int = stringLength(elem1) + stringLength(elem2) + stringLength(elem3)
periodicValue shouldBe Value(
ratePerSecond = stringLength(elem3) * 1000.0 / periodDuration.toMillis
)
finalValue shouldBe Value(
ratePerSecond = totalCount / totalDuration.getSeconds.toDouble
)
}
// A period whose rate drops below the MinRate objective is reported together
// with the violating rate value.
"compute violated minimum rate periodic SLO and the corresponding violating value" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val minAllowedRatePerSecond = 2.0
val objective = RateObjective.MinRate(minAllowedRatePerSecond)
val metric = anEmptyStringMetric(periodicObjectives = List(objective))
val violatedObjective =
metric
.onNext("abc")
.onNext("de")
.periodicValue(periodDuration)
._1
.onNext("f")
.onNext("gh")
// During this period we get 3 elements: f, g, h, which means that the rate is 1.5
.periodicValue(periodDuration)
._1
.onNext("ijklmn")
.violatedPeriodicObjectives
violatedObjective shouldBe List(
objective -> Value(1.5)
)
}
// Periods that satisfy the MinRate objective produce no violation report.
"not report not violated periodic min rate objectives" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val minAllowedRatePerSecond = 2.0
val objective = RateObjective.MinRate(minAllowedRatePerSecond)
val metric = anEmptyStringMetric(periodicObjectives = List(objective))
val violatedObjective =
metric
.onNext("abc")
.onNext("de")
.periodicValue(periodDuration)
._1
.onNext("f")
.onNext("gh")
.onNext("ijk")
.periodicValue(periodDuration)
._1
.onNext("lmnoprst")
.violatedPeriodicObjectives
violatedObjective shouldBe Nil
}
// A final MinRate objective is judged on the overall average rate.
"report violated min rate final objective" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val totalDuration: Duration = Duration.ofSeconds(6)
val minAllowedRatePerSecond = 2.0
val objective = RateObjective.MinRate(minAllowedRatePerSecond)
val metric = anEmptyStringMetric(finalObjectives = List(objective))
val violatedObjective =
metric
.onNext("abc")
.periodicValue(periodDuration)
._1
.onNext("def")
.onNext("ghi")
// total rate is (3 + 3 + 3) / 6.0
.violatedFinalObjectives(totalDuration)
violatedObjective shouldBe List(
objective -> Value(1.5)
)
}
// An overall rate at or above the minimum yields no final violation.
"not report non-violated min rate final objective" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val totalDuration: Duration = Duration.ofSeconds(6)
val minAllowedRatePerSecond = 2.0
val objective = RateObjective.MinRate(minAllowedRatePerSecond)
val metric = anEmptyStringMetric(finalObjectives = List(objective))
val violatedObjective =
metric
.onNext("abc")
.periodicValue(periodDuration)
._1
.onNext("def")
.onNext("ghi")
.onNext("jklmno")
// total rate is (3 + 3 + 3 + 6) / 6.0
.violatedFinalObjectives(totalDuration)
violatedObjective shouldBe Nil
}
// Final objectives ignore per-period dips: only the overall rate counts.
"not report non-violated min rate final objective if the objective is violated only in a period" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val totalDuration: Duration = Duration.ofSeconds(3)
val minAllowedRatePerSecond = 2.0
val objective = RateObjective.MinRate(minAllowedRatePerSecond)
val metric = anEmptyStringMetric(finalObjectives = List(objective))
val violatedObjective =
metric
.onNext("abc")
// periodic rate is 3 / 2.0 = 1.5
.periodicValue(periodDuration)
._1
.onNext("def")
.onNext("ghi")
// total rate is (3 + 3 + 3) / 3.0 = 3.0
.violatedFinalObjectives(totalDuration)
violatedObjective shouldBe Nil
}
// A final MaxRate objective is violated when the overall rate exceeds it.
"report violated max rate final objective" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val totalDuration: Duration = Duration.ofSeconds(3)
val objective = RateObjective.MaxRate(3.0)
val metric = CountRateMetric.empty[String](
countingFunction = stringLength,
periodicObjectives = Nil,
finalObjectives = List(objective),
)
val violatedObjective =
metric
.onNext("abc")
.periodicValue(periodDuration)
._1
.onNext("def")
.onNext("ghijkl")
// total rate is (3 + 3 + 6) / 3.0 = 4.0
.violatedFinalObjectives(totalDuration)
violatedObjective shouldBe List(
objective -> Value(4.0)
)
}
// An overall rate at or below the maximum yields no final violation.
"not report non-violated max rate final objective" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val totalDuration: Duration = Duration.ofSeconds(3)
val objective = RateObjective.MaxRate(3.0)
val metric = anEmptyStringMetric(finalObjectives = List(objective))
val violatedObjective =
metric
.onNext("abc")
.periodicValue(periodDuration)
._1
.onNext("def")
.onNext("ghi")
// total rate is (3 + 3 + 3) / 3.0 = 3.0
.violatedFinalObjectives(totalDuration)
violatedObjective shouldBe Nil
}
// Final MaxRate objectives likewise ignore per-period spikes.
"not report non-violated max rate final objective if the objective is violated only in a period" in {
val periodDuration: Duration = Duration.ofSeconds(2)
val totalDuration: Duration = Duration.ofSeconds(4)
val objective = RateObjective.MaxRate(2.0)
val metric = anEmptyStringMetric(finalObjectives = List(objective))
val violatedObjective =
metric
.onNext("abcde")
// periodic rate is 5 / 2.0 = 2.5
.periodicValue(periodDuration)
._1
.onNext("f")
.onNext("gh")
// total rate is (5 + 1 + 2) / 4.0 = 2.0
.violatedFinalObjectives(totalDuration)
violatedObjective shouldBe Nil
}
}
// Counting function used throughout: one count per character.
private def stringLength(value: String): Int = value.length
// Builds a character-counting metric with the given objectives (none by default).
private def anEmptyStringMetric(
periodicObjectives: List[CountRateMetric.RateObjective] = Nil,
finalObjectives: List[CountRateMetric.RateObjective] = Nil,
): CountRateMetric[String] =
CountRateMetric.empty[String](
countingFunction = stringLength,
periodicObjectives = periodicObjectives,
finalObjectives = finalObjectives,
)
}

View File

@ -1,244 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.metrics.DelayMetric._
import com.google.protobuf.timestamp.Timestamp
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.time.{Clock, Duration, Instant, ZoneId}
import scala.language.existentials
class DelayMetricSpec extends AnyWordSpec with Matchers {
DelayMetric.getClass.getSimpleName should {
"correctly handle initial state" in {
val periodDuration: Duration = Duration.ofMillis(100)
val metric: DelayMetric[String] = anEmptyDelayMetric(Clock.systemUTC())
val (_, periodicValue) = metric.periodicValue(periodDuration)
val totalDuration: Duration = Duration.ofSeconds(1)
val finalValue = metric.finalValue(totalDuration)
periodicValue shouldBe Value(None)
finalValue shouldBe Value(None)
}
"compute values after processing elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(5)
val elem1: String = "abc"
val elem2: String = "defgh"
val testNow = Clock.systemUTC().instant()
val recordTime1 = testNow.minusSeconds(11)
val recordTime2 = testNow.minusSeconds(22)
val recordTime3 = testNow.minusSeconds(33)
val delay1 = secondsBetween(recordTime1, testNow)
val delay2 = secondsBetween(recordTime2, testNow)
val delay3 = secondsBetween(recordTime3, testNow)
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> List(recordTime1, recordTime2),
elem2 -> List(recordTime3),
)
)
val clock = Clock.fixed(testNow, ZoneId.of("UTC"))
val metric: DelayMetric[String] =
DelayMetric.empty[String](
recordTimeFunction = testRecordTimeFunction,
clock = clock,
)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
val expectedMean = (delay1 + delay2 + delay3) / 3
periodicValue shouldBe Value(Some(expectedMean))
finalValue shouldBe Value(None)
}
"correctly handle periods with no elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(5)
val elem1: String = "abc"
val elem2: String = "defg"
val testNow = Clock.systemUTC().instant()
val recordTime1 = testNow.minusSeconds(11)
val recordTime2 = testNow.minusSeconds(22)
val recordTime3 = testNow.minusSeconds(33)
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> List(recordTime1, recordTime2),
elem2 -> List(recordTime3),
)
)
val clock = Clock.fixed(testNow, ZoneId.of("UTC"))
val metric: DelayMetric[String] =
DelayMetric.empty[String](
recordTimeFunction = testRecordTimeFunction,
clock = clock,
)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
._1
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
periodicValue shouldBe Value(None)
finalValue shouldBe Value(None)
}
"correctly handle multiple periods with elements" in {
val periodDuration: Duration = Duration.ofMillis(100)
val totalDuration: Duration = Duration.ofSeconds(5)
val elem1: String = "abc"
val elem2: String = "defg"
val elem3: String = "hij"
val testNow = Clock.systemUTC().instant()
val recordTime1 = testNow.minusSeconds(11)
val recordTime2 = testNow.minusSeconds(22)
val recordTime3 = testNow.minusSeconds(33)
val recordTime4 = testNow.minusSeconds(44)
val recordTime5 = testNow.minusSeconds(55)
val delay4 = secondsBetween(recordTime4, testNow)
val delay5 = secondsBetween(recordTime5, testNow)
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> List(recordTime1, recordTime2),
elem2 -> List(recordTime3),
elem3 -> List(recordTime4, recordTime5),
)
)
val clock = Clock.fixed(testNow, ZoneId.of("UTC"))
val metric: DelayMetric[String] =
DelayMetric.empty[String](
recordTimeFunction = testRecordTimeFunction,
clock = clock,
)
val (newMetric, periodicValue) = metric
.onNext(elem1)
.onNext(elem2)
.periodicValue(periodDuration)
._1
.periodicValue(periodDuration)
._1
.onNext(elem3)
.periodicValue(periodDuration)
val finalValue = newMetric.finalValue(totalDuration)
val expectedMean = (delay4 + delay5) / 2
periodicValue shouldBe Value(Some(expectedMean))
finalValue shouldBe Value(None)
}
"compute violated max delay SLO with the most extreme value" in {
val periodDuration: Duration = Duration.ofMillis(100)
val maxAllowedDelaySeconds: Long = 1000
val elem1: String = "abc"
val elem2: String = "defg"
val elem3: String = "hijkl"
val elem4: String = "mno"
val testNow = Clock.systemUTC().instant()
// first period
val recordTime1 =
testNow.minusSeconds(maxAllowedDelaySeconds - 100) // allowed record time
// second period
val recordTime2A =
testNow.minusSeconds(maxAllowedDelaySeconds + 100) // not allowed record time
val recordTime2B =
testNow.minusSeconds(maxAllowedDelaySeconds + 200) // not allowed record time
val delay2A = durationBetween(recordTime2A, testNow)
val delay2B = durationBetween(recordTime2B, testNow)
val meanInPeriod2 = delay2A.plus(delay2B).dividedBy(2).getSeconds
// third period - a period with record times higher than anywhere else,
// the mean delay from this period should be provided by the metric as the most violating value
val recordTime3A = testNow.minusSeconds(
maxAllowedDelaySeconds + 1100
) // not allowed record time
val recordTime3B = testNow.minusSeconds(
maxAllowedDelaySeconds + 1200
) // not allowed record time
val delay3A = durationBetween(recordTime3A, testNow)
val delay3B = durationBetween(recordTime3B, testNow)
val meanInPeriod3 = delay3A.plus(delay3B).dividedBy(2).getSeconds
// fourth period
val recordTime4 =
testNow.minusSeconds(maxAllowedDelaySeconds + 300) // not allowed record time
val delay4 = durationBetween(recordTime4, testNow)
val meanInPeriod4 = delay4.getSeconds
val maxDelay = List(meanInPeriod2, meanInPeriod3, meanInPeriod4).max
def testRecordTimeFunction: String => List[Timestamp] = recordTimeFunctionFromMap(
Map(
elem1 -> List(recordTime1),
elem2 -> List(recordTime2A, recordTime2B),
elem3 -> List(recordTime3A, recordTime3B),
elem4 -> List(recordTime4),
)
)
val expectedViolatedObjective = MaxDelay(maxAllowedDelaySeconds)
val clock = Clock.fixed(testNow, ZoneId.of("UTC"))
val metric: DelayMetric[String] =
DelayMetric.empty[String](
recordTimeFunction = testRecordTimeFunction,
clock = clock,
objective = Some(expectedViolatedObjective),
)
val violatedObjectives =
metric
.onNext(elem1)
.periodicValue(periodDuration)
._1
.onNext(elem2)
.periodicValue(periodDuration)
._1
.onNext(elem3)
.periodicValue(periodDuration)
._1
.onNext(elem4)
.periodicValue(periodDuration)
._1
.violatedPeriodicObjectives
violatedObjectives shouldBe List(
expectedViolatedObjective -> Value(Some(maxDelay))
)
}
}
private def recordTimeFunctionFromMap(
map: Map[String, List[Instant]]
)(str: String): List[Timestamp] =
map
.map { case (k, v) => k -> v.map(instantToTimestamp) }
.getOrElse(str, throw new RuntimeException(s"Unexpected record function argument: $str"))
private def instantToTimestamp(instant: Instant): Timestamp =
Timestamp.of(instant.getEpochSecond, instant.getNano)
// Duration from `first` to `second` (negative when `second` precedes `first`).
private def durationBetween(first: Instant, second: Instant): Duration =
  Duration.between(first, second)
// Whole seconds from `first` to `second`; the sub-second part is truncated by getSeconds.
private def secondsBetween(first: Instant, second: Instant): Long =
  Duration.between(first, second).getSeconds
// Yields one fixed Timestamp(100, 0) per character of the input string;
// tests using this helper only care about the element count.
private def dummyRecordTimesFunction(str: String): List[Timestamp] =
  List.fill(str.length)(Timestamp.of(100, 0))
// Fresh DelayMetric over the dummy record-time function, with no objective set.
private def anEmptyDelayMetric(clock: Clock): DelayMetric[String] =
  DelayMetric.empty[String](dummyRecordTimesFunction, clock)
}

View File

@ -1,66 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.metrics.LatencyMetric.MaxLatency
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.util.chaining._
import java.time.Duration
/** Tests for LatencyMetric: verifies the running-mean latency computation and
  * that MaxLatency reports a violation once the mean exceeds the objective.
  */
class LatencyMetricSpec extends AnyWordSpec with Matchers {
  // The latency metric ignores the period length; any fixed value works here.
  private val dummyPeriod = Duration.ofSeconds(1L)

  LatencyMetric.getClass.getSimpleName should {
    "compute correct values on updates" in {
      LatencyMetric
        .empty(maxLatencyObjectiveMillis = 0L)
        // No samples yet: both final and periodic values are 0.
        .tap(_.finalValue(dummyPeriod) shouldBe LatencyMetric.Value(0L))
        .tap(
          _.periodicValue(dummyPeriod) shouldBe (LatencyMetric(
            0L,
            0,
            MaxLatency(0),
          ) -> LatencyMetric.Value(0L))
        )
        .onNext(1000L)
        // Single sample of 1000: mean is 1000.
        .tap(_.finalValue(dummyPeriod) shouldBe LatencyMetric.Value(1000L))
        .tap(
          _.periodicValue(dummyPeriod) shouldBe (LatencyMetric(
            1000L,
            1,
            MaxLatency(0),
          ) -> LatencyMetric.Value(1000L))
        )
        .onNext(2000L)
        // Two samples (1000, 2000): accumulated total 3000, mean 1500.
        .tap(_.finalValue(dummyPeriod) shouldBe LatencyMetric.Value(1500L))
        .tap(
          _.periodicValue(dummyPeriod) shouldBe (LatencyMetric(
            3000L,
            2,
            MaxLatency(0),
          ) -> LatencyMetric.Value(1500L))
        )
    }
  }

  MaxLatency.getClass.getSimpleName should {
    "correctly report violated metric" in {
      val maxObjectiveMillis = 1000L
      LatencyMetric
        .empty(maxLatencyObjectiveMillis = maxObjectiveMillis)
        .onNext(nanosFromMillis(1000L))
        // Mean equals the objective exactly — not yet a violation.
        .tap(_.violatedFinalObjectives(dummyPeriod) shouldBe empty)
        .onNext(nanosFromMillis(2000L))
        // Mean of 1500ms now exceeds the 1000ms objective.
        .tap(
          _.violatedFinalObjectives(dummyPeriod) shouldBe List(
            MaxLatency(nanosFromMillis(1000L)) -> LatencyMetric.Value(nanosFromMillis(1500L))
          )
        )
    }
  }

  // Converts milliseconds to nanoseconds, the unit the metric consumes.
  private def nanosFromMillis(millis: Long) = millis * 1000000L
}

View File

@ -1,51 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.wordspec.AnyWordSpec
import scala.util.Random
/** Tests for DelayMetric.MaxDelay: violation detection around the threshold
  * and the ordering used to pick the more violating of two values.
  */
class MaxDelaySpec extends AnyWordSpec with Matchers with TableDrivenPropertyChecks {

  "Maximum delay SLO" should {

    "correctly report violation" in {
      import DelayMetric.Value
      // Probe the objective just below, exactly at, and just above an
      // arbitrary threshold; an absent delay is never a violation.
      val threshold = Random.nextInt(10000).toLong
      val objective = DelayMetric.MaxDelay(threshold)
      val cases = Table(
        ("Metric value", "Expected violated"),
        (Value(None), false),
        (Value(Some(threshold - 1)), false),
        (Value(Some(threshold)), false),
        (Value(Some(threshold + 1)), true),
      )
      forAll(cases) { (metricValue, expectedViolated) =>
        objective.isViolatedBy(metricValue) shouldBe expectedViolated
      }
    }

    "correctly pick a value more violating requirements" in {
      import DelayMetric.Value
      // The ordering prefers the larger delay; None is the least violating.
      val lower = Random.nextInt(10).toLong
      val higher = lower + 1
      val cases = Table(
        ("first", "second", "expected result"),
        (Value(Some(lower)), Value(Some(higher)), Value(Some(higher))),
        (Value(Some(higher)), Value(Some(lower)), Value(Some(higher))),
        (Value(Some(lower)), Value(None), Value(Some(lower))),
        (Value(None), Value(Some(lower)), Value(Some(lower))),
        (Value(None), Value(None), Value(None)),
      )
      forAll(cases) { (first, second, expected) =>
        Ordering[Value].max(first, second) shouldBe expected
      }
    }
  }
}

View File

@ -1,240 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import akka.actor.testkit.typed.scaladsl.{BehaviorTestKit, ScalaTestWithActorTestKit}
import akka.actor.typed.{ActorRef, Behavior}
import com.daml.clock.AdjustableClock
import org.scalatest.wordspec.AnyWordSpecLike
import java.time.{Clock, Duration, Instant, ZoneId}
import scala.util.Random
/** Behavioral tests for the MetricsCollector actor: periodic report
  * generation and throttling, final report generation, inclusion of
  * objective-violating values, and actor shutdown after the final report.
  */
class MetricsCollectorSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike {
  import MetricsCollector.Message
  import MetricsCollector.Response

  "The MetricsCollector" should {

    "respond with empty periodic report" in new CollectorFixture {
      val probe = testKit.createTestProbe[Response.PeriodicReportResponse]()
      collector ! Message.PeriodicReportRequest(probe.ref)
      // No values observed yet, so the test metric renders an empty suffix.
      probe.expectMessage(
        Response.PeriodicReport(
          values = List(
            TestMetricValue("PERIODIC:")
          )
        )
      )
    }

    "respond with correct periodic report" in new CollectorFixture {
      val probe = testKit.createTestProbe[Response.PeriodicReportResponse]()
      collector ! Message.NewValue("banana")
      collector ! Message.NewValue("mango")
      collector ! Message.PeriodicReportRequest(probe.ref)
      // Values are concatenated in arrival order by the test metric.
      probe.expectMessage(
        Response.PeriodicReport(
          values = List(
            TestMetricValue("PERIODIC:banana-mango")
          )
        )
      )
    }

    "not respond with a periodic report when requests are too frequent" in new CollectorFixture {
      val probe = testKit.createTestProbe[Response.PeriodicReportResponse]()
      collector ! Message.NewValue("banana")
      collector ! Message.NewValue("mango")
      collector ! Message.PeriodicReportRequest(probe.ref)
      // First request succeeds: 10s have elapsed since the last periodic check,
      // more than the 5s minimum interval configured in the fixture.
      probe.expectMessageType[Response.PeriodicReport]
      clock.fastForward(Duration.ofSeconds(1))
      collector ! Message.PeriodicReportRequest(probe.ref)
      // Second request comes only 1s later — below the minimum interval.
      probe.expectMessage(Response.ReportNotReady)
    }

    "include objective-violating values in periodic report" in new CollectorFixture {
      val probe = testKit.createTestProbe[Response.PeriodicReportResponse]()
      collector ! Message.NewValue("banana")
      collector ! Message.NewValue(TestObjective.TestViolatingValue)
      collector ! Message.NewValue("mango")
      collector ! Message.PeriodicReportRequest(probe.ref)
      // The violating value ("tomato") is reported inline with the others.
      probe.expectMessage(
        Response.PeriodicReport(
          values = List(
            TestMetricValue("PERIODIC:banana-tomato-mango")
          )
        )
      )
    }

    "respond with empty final report" in new CollectorFixture {
      val probe = testKit.createTestProbe[Response.FinalReport]()
      collector ! Message.FinalReportRequest(probe.ref)
      // Total duration is the 10s between the fixture start time and "now".
      probe.expectMessage(
        Response.FinalReport(
          metricsData = List(
            Response.MetricFinalReportData(
              name = "Test Metric",
              value = TestMetricValue("FINAL:"),
              violatedObjectives = Nil,
            )
          ),
          totalDuration = Duration.ofSeconds(10),
        )
      )
    }

    "respond with correct final report" in new CollectorFixture {
      val probe = testKit.createTestProbe[Response.FinalReport]()
      collector ! Message.NewValue("mango")
      collector ! Message.NewValue("banana")
      collector ! Message.NewValue("cherry")
      collector ! Message.FinalReportRequest(probe.ref)
      probe.expectMessage(
        Response.FinalReport(
          metricsData = List(
            Response.MetricFinalReportData(
              name = "Test Metric",
              value = TestMetricValue("FINAL:mango-banana-cherry"),
              violatedObjectives = Nil,
            )
          ),
          totalDuration = Duration.ofSeconds(10),
        )
      )
    }

    "include information about violated objective in the final report" in new CollectorFixture {
      val probe = testKit.createTestProbe[Response.FinalReport]()
      collector ! Message.NewValue("mango")
      collector ! Message.NewValue(TestObjective.TestViolatingValue)
      collector ! Message.NewValue("cherry")
      collector ! Message.FinalReportRequest(probe.ref)
      // The violated objective is listed alongside the offending value.
      probe.expectMessage(
        Response.FinalReport(
          metricsData = List(
            Response.MetricFinalReportData(
              name = "Test Metric",
              value = TestMetricValue("FINAL:mango-tomato-cherry"),
              violatedObjectives = List(
                (
                  TestObjective,
                  TestMetricValue(TestObjective.TestViolatingValue),
                )
              ),
            )
          ),
          totalDuration = Duration.ofSeconds(10),
        )
      )
    }

    "stop after receiving final report request" in {
      val probe = testKit.createTestProbe[Response.FinalReport]()
      val behaviorTestKit = BehaviorTestKit(behavior)
      behaviorTestKit.isAlive shouldBe true
      // The final report is a terminal message: the behavior must stop itself.
      behaviorTestKit.run(Message.FinalReportRequest(probe.ref))
      behaviorTestKit.isAlive shouldBe false
    }
  }

  /** Per-test fixture: a collector whose start time and last periodic check
    * are 10 seconds in the past, driven by an adjustable fixed clock so tests
    * can advance time deterministically.
    */
  private class CollectorFixture {
    private val now = Clock.systemUTC().instant()
    private val tenSecondsAgo = now.minusSeconds(10)
    private val minimumReportInterval = Duration.ofSeconds(5)
    val clock = AdjustableClock(
      baseClock = Clock.fixed(now, ZoneId.of("UTC")),
      offset = Duration.ZERO,
    )
    val collector: ActorRef[Message] =
      spawnWithFixedClock(clock, tenSecondsAgo, tenSecondsAgo, minimumReportInterval)
  }

  // Spawns a collector actor with explicit start time, last-check time and
  // report-throttling interval, under a randomly-named actor.
  private def spawnWithFixedClock(
      clock: Clock,
      startTime: Instant,
      lastPeriodicCheck: Instant,
      minimumTimePeriodBetweenSubsequentReports: Duration,
  ) = {
    val behavior =
      new MetricsCollector[String](None, minimumTimePeriodBetweenSubsequentReports, clock)
        .handlingMessages(
          metrics = List(TestMetric()),
          lastPeriodicCheck = lastPeriodicCheck,
          startTime = startTime,
        )
    testKit.spawn(
      behavior = behavior,
      name = Random.alphanumeric.take(10).mkString,
    )
  }

  // Default collector behavior (system clock) used by the shutdown test.
  private def behavior: Behavior[Message] = {
    MetricsCollector[String](
      metrics = List(TestMetric()),
      exposedMetrics = None,
    )
  }

  private case class TestMetricValue(value: String) extends MetricValue

  // Objective violated by exactly one marker value ("tomato").
  private case object TestObjective extends ServiceLevelObjective[TestMetricValue] {
    val TestViolatingValue = "tomato"
    override def isViolatedBy(metricValue: TestMetricValue): Boolean =
      metricValue.value == TestViolatingValue
  }

  /** Minimal metric implementation: accumulates raw string values and renders
    * them joined with '-' under a PERIODIC:/FINAL: prefix.
    */
  private case class TestMetric(
      processedElems: List[String] = List.empty
  ) extends Metric[String] {
    override type V = TestMetricValue
    override type Objective = TestObjective.type
    override def name: String = "Test Metric"
    override def onNext(value: String): Metric[String] = {
      this.copy(processedElems = processedElems :+ value)
    }
    override def periodicValue(periodDuration: Duration): (Metric[String], TestMetricValue) = {
      (this, TestMetricValue(s"PERIODIC:${processedElems.mkString("-")}"))
    }
    override def finalValue(totalDuration: Duration): TestMetricValue = {
      TestMetricValue(s"FINAL:${processedElems.mkString("-")}")
    }
    override def violatedPeriodicObjectives: List[(TestObjective.type, TestMetricValue)] =
      if (processedElems.contains(TestObjective.TestViolatingValue))
        List(TestObjective -> TestMetricValue(TestObjective.TestViolatingValue))
      else
        Nil
    override def violatedFinalObjectives(
        totalDuration: Duration
    ): List[(TestObjective.type, TestMetricValue)] = Nil
  }
}

View File

@ -1,19 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import java.time.Duration
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.concurrent.duration.FiniteDuration
/** Sanity check for the FiniteDuration -> java.time.Duration conversion. */
class MetricsSetSpec extends AnyFlatSpec with Matchers {

  it should "convert Scala's FiniteDuration to Java's Duration" in {
    val fiveSeconds = FiniteDuration(5, "seconds")
    MetricsSet.toJavaDuration(fiveSeconds) shouldBe Duration.ofSeconds(5)
  }
}

View File

@ -1,33 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.wordspec.AnyWordSpec
import scala.util.Random
/** Tests for ConsumptionSpeedMetric.MinConsumptionSpeed violation detection. */
class MinConsumptionSpeedSpec extends AnyWordSpec with Matchers with TableDrivenPropertyChecks {

  "Min consumption speed SLO" should {

    "correctly report violation" in {
      import ConsumptionSpeedMetric.Value
      // Probe the objective below, exactly at, and above a random target
      // speed; an absent speed counts as a violation.
      val targetSpeed = Random.nextDouble()
      val objective = ConsumptionSpeedMetric.MinConsumptionSpeed(targetSpeed)
      val cases = Table(
        ("Metric value", "Expected violated"),
        (Value(None), true),
        (Value(Some(targetSpeed - 1.0)), true),
        (Value(Some(targetSpeed)), false),
        (Value(Some(targetSpeed + 1.0)), false),
      )
      forAll(cases) { (metricValue, expectedViolated) =>
        objective.isViolatedBy(metricValue) shouldBe expectedViolated
      }
    }
  }
}

View File

@ -1,102 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.time.Duration
/** Tests for SizeMetric: mean throughput in megabytes/second derived from a
  * per-element sizing function, averaged across periods for the final value.
  */
class SizeMetricSpec extends AnyWordSpec with Matchers {
  SizeMetric.getClass.getSimpleName should {
    "correctly handle initial state" in {
      val totalDuration: Duration = Duration.ofSeconds(1)
      val periodDuration: Duration = Duration.ofMillis(100)
      val metric: SizeMetric[String] = anEmptySizeMetric()
      val (_, periodicValue) = metric.periodicValue(periodDuration)
      val finalValue = metric.finalValue(totalDuration)
      // No elements observed: both values are zero throughput.
      periodicValue shouldBe SizeMetric.Value(0.0)
      finalValue shouldBe SizeMetric.Value(0.0)
    }

    "compute values after processing elements" in {
      val periodDuration: Duration = Duration.ofMillis(100)
      val totalDuration: Duration = Duration.ofSeconds(5)
      val metric: SizeMetric[String] = anEmptySizeMetric()
      val elem1: String = "abc"
      val elem2: String = "defghi"
      val (newMetric, periodicValue) = metric
        .onNext(elem1)
        .onNext(elem2)
        .periodicValue(periodDuration)
      val finalValue = newMetric.finalValue(totalDuration)
      // Combined element size converted to megabytes...
      val totalSizeMegabytes =
        (testSizingFunction(elem1) + testSizingFunction(elem2)).toDouble / 1024 / 1024
      // ...and scaled to a per-second rate over the 100ms period. With a
      // single period, the final value equals the periodic value.
      periodicValue shouldBe SizeMetric.Value(totalSizeMegabytes * 1000.0 / periodDuration.toMillis)
      finalValue shouldBe SizeMetric.Value(totalSizeMegabytes * 1000.0 / periodDuration.toMillis)
    }

    "correctly handle periods with no elements" in {
      val periodDuration: Duration = Duration.ofMillis(100)
      val totalDuration: Duration = Duration.ofSeconds(5)
      val metric: SizeMetric[String] = anEmptySizeMetric()
      val elem1: String = "abc"
      val elem2: String = "defghi"
      // Two periods: one with both elements, then an empty one.
      val (newMetric, periodicValue) = metric
        .onNext(elem1)
        .onNext(elem2)
        .periodicValue(periodDuration)
        ._1
        .periodicValue(periodDuration)
      val finalValue = newMetric.finalValue(totalDuration)
      val firstPeriodMegabytes =
        (testSizingFunction(elem1) + testSizingFunction(elem2)).toDouble / 1024 / 1024
      val firstPeriodMean = firstPeriodMegabytes * 1000.0 / periodDuration.toMillis
      val secondPeriodMean = 0.0
      // The final value averages the per-period means, including empty periods.
      val totalMean = (firstPeriodMean + secondPeriodMean) / 2
      periodicValue shouldBe SizeMetric.Value(secondPeriodMean)
      finalValue shouldBe SizeMetric.Value(totalMean)
    }

    "correctly handle multiple periods with elements" in {
      val periodDuration: Duration = Duration.ofMillis(100)
      val totalDuration: Duration = Duration.ofSeconds(5)
      val metric: SizeMetric[String] = anEmptySizeMetric()
      val elem1: String = "abc"
      val elem2: String = "defg"
      val elem3: String = "hij"
      // Three periods: (elem1, elem2), empty, (elem3).
      val (newMetric, periodicValue) = metric
        .onNext(elem1)
        .onNext(elem2)
        .periodicValue(periodDuration)
        ._1
        .periodicValue(periodDuration)
        ._1
        .onNext(elem3)
        .periodicValue(periodDuration)
      val finalValue = newMetric.finalValue(totalDuration)
      val firstPeriodMegabytes =
        (testSizingFunction(elem1) + testSizingFunction(elem2)).toDouble / 1024 / 1024
      val firstPeriodMean = firstPeriodMegabytes * 1000.0 / periodDuration.toMillis
      val secondPeriodMean = 0.0
      val thirdPeriodMegabytes = testSizingFunction(elem3).toDouble / 1024 / 1024
      val thirdPeriodMean = thirdPeriodMegabytes * 1000.0 / periodDuration.toMillis
      // Final value is the mean of the three per-period means.
      val totalMean = (firstPeriodMean + secondPeriodMean + thirdPeriodMean) / 3
      periodicValue shouldBe SizeMetric.Value(thirdPeriodMean)
      finalValue shouldBe SizeMetric.Value(totalMean)
    }
  }

  // Deterministic sizing: 12345 bytes per character of the input.
  private def testSizingFunction(value: String): Long = value.length.toLong * 12345
  private def anEmptySizeMetric(): SizeMetric[String] =
    SizeMetric.empty[String](sizingFunction = testSizingFunction)
}

View File

@ -1,90 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics
import com.daml.ledger.api.benchtool.metrics.TotalCountMetric.Value
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.time.Duration
/** Tests for TotalCountMetric: a cumulative counter driven by a per-element
  * counting function (here, string length).
  */
class TotalCountMetricSpec extends AnyWordSpec with Matchers {
  TotalCountMetric.getClass.getSimpleName should {
    "correctly handle initial state" in {
      val periodDuration: Duration = Duration.ofMillis(100)
      val totalDuration: Duration = Duration.ofSeconds(1)
      val metric: TotalCountMetric[String] = anEmptyStringMetric()
      val (_, periodicValue) = metric.periodicValue(periodDuration)
      val finalValue = metric.finalValue(totalDuration)
      // Nothing counted yet.
      periodicValue shouldBe Value(0)
      finalValue shouldBe Value(0)
    }

    "compute values after processing elements" in {
      val periodDuration: Duration = Duration.ofMillis(100)
      val totalDuration: Duration = Duration.ofSeconds(5)
      val metric: TotalCountMetric[String] = anEmptyStringMetric()
      val elem1: String = "abc"
      val elem2: String = "defg"
      val (newMetric, periodicValue) = metric
        .onNext(elem1)
        .onNext(elem2)
        .periodicValue(periodDuration)
      val finalValue = newMetric.finalValue(totalDuration)
      // Each element contributes its length to the running total.
      val totalCount: Int = stringLength(elem1) + stringLength(elem2)
      periodicValue shouldBe Value(totalCount)
      finalValue shouldBe Value(totalCount)
    }

    "correctly handle periods with no elements" in {
      val periodDuration: Duration = Duration.ofMillis(100)
      val totalDuration: Duration = Duration.ofSeconds(5)
      val metric: TotalCountMetric[String] = anEmptyStringMetric()
      val elem1: String = "abc"
      val elem2: String = "defg"
      val (newMetric, periodicValue) = metric
        .onNext(elem1)
        .onNext(elem2)
        .periodicValue(periodDuration)
        ._1
        .periodicValue(periodDuration)
      val finalValue = newMetric.finalValue(totalDuration)
      // The count is cumulative: an empty period does not reset or dilute it.
      val totalCount: Int = stringLength(elem1) + stringLength(elem2)
      periodicValue shouldBe Value(totalCount)
      finalValue shouldBe Value(totalCount)
    }

    "correctly handle multiple periods with elements" in {
      val periodDuration: Duration = Duration.ofMillis(100)
      val totalDuration: Duration = Duration.ofSeconds(5)
      val metric: TotalCountMetric[String] = anEmptyStringMetric()
      val elem1: String = "abc"
      val elem2: String = "defg"
      val elem3: String = "hij"
      val (newMetric, periodicValue) = metric
        .onNext(elem1)
        .onNext(elem2)
        .periodicValue(periodDuration)
        ._1
        .onNext(elem3)
        .periodicValue(periodDuration)
      val finalValue = newMetric.finalValue(totalDuration)
      // Elements from all periods accumulate into one total.
      val totalCount: Int = stringLength(elem1) + stringLength(elem2) + stringLength(elem3)
      periodicValue shouldBe Value(totalCount)
      finalValue shouldBe Value(totalCount)
    }
  }

  // Counting function used by the tests: one count per character.
  private def stringLength(value: String): Int = value.length
  private def anEmptyStringMetric(): TotalCountMetric[String] =
    TotalCountMetric.empty[String](stringLength)
}

View File

@ -1,64 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.metrics.metrics
import java.time.{Clock, Duration, Instant, ZoneId}
import com.daml.clock.AdjustableClock
import com.daml.ledger.api.benchtool.metrics.metrics.TotalRuntimeMetric.{
MaxDurationObjective,
Value,
}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/** Tests for TotalRuntimeMetric: total stream runtime measured between the
  * start time and the last observed element, checked against a maximum
  * duration objective. Time is driven by an AdjustableClock.
  */
class TotalRuntimeMetricSpec extends AnyFlatSpec with Matchers {

  it should "keep track of total stream runtime" in {
    val startTime = Instant.EPOCH.plusMillis(1000)
    val clock = AdjustableClock(Clock.fixed(startTime, ZoneId.systemDefault()), Duration.ZERO)
    // Runtime above 103ms should violate the final objective.
    val objective = MaxDurationObjective(
      maxValue = Duration.ofMillis(103)
    )
    val tested = TotalRuntimeMetric[Any](
      clock = clock,
      startTime = clock.instant,
      objective = objective,
    )
    // This metric ignores the period-duration argument entirely.
    val ignoredDuration = Duration.ofMillis(0)
    val item = new Object()
    // Before any element: runtime is zero.
    tested.periodicValue(ignoredDuration) shouldBe (tested, Value(Duration.ZERO))
    clock.fastForward(Duration.ofMillis(15))
    tested.onNext(item)
    // NOTE(review): onNext's returned metric is discarded here yet the
    // runtime advances — the metric appears to be internally mutable.
    tested.periodicValue(ignoredDuration) shouldBe (tested, Value(Duration.ofMillis(15)))
    clock.fastForward(Duration.ofMillis(30))
    tested.onNext(item)
    tested.periodicValue(ignoredDuration) shouldBe (tested, Value(Duration.ofMillis(45)))
    // 45ms is still within the 103ms objective.
    tested.violatedFinalObjectives(ignoredDuration) shouldBe Nil
    tested.violatedPeriodicObjectives shouldBe Nil
    tested.finalValue(ignoredDuration) shouldBe Value(Duration.ofMillis(45))
    clock.fastForward(Duration.ofMillis(100))
    tested.onNext(item)
    tested.periodicValue(ignoredDuration) shouldBe (tested, Value(Duration.ofMillis(145)))
    // The objective is a final-only objective: violated at 145ms in the
    // final view but never reported periodically.
    tested.violatedPeriodicObjectives shouldBe Nil
    tested.violatedFinalObjectives(ignoredDuration) shouldBe List(
      objective -> Value(Duration.ofMillis(145))
    )
    tested.finalValue(ignoredDuration) shouldBe Value(Duration.ofMillis(145))
    clock.fastForward(Duration.ofMillis(100))
    tested.onNext(item)
    tested.violatedFinalObjectives(ignoredDuration) shouldBe List(
      objective -> Value(Duration.ofMillis(245))
    )
  }
}

View File

@ -1,29 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.v1.value.Value
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/** Tests for ActiveContractKeysPool: per-template add/remove of contract keys
  * with a seeded (deterministic) randomness provider.
  */
class ActiveContractKeysPoolSpec extends AnyFlatSpec with Matchers {

  it should "put and pop from a pool" in {
    val pool = new ActiveContractKeysPool(RandomnessProvider.forSeed(0))
    // A template name never seen before fails with NoSuchElementException.
    intercept[NoSuchElementException](pool.getAndRemoveContractKey(templateName = "t1"))
    pool.addContractKey(templateName = "t1", key = textValue("1"))
    intercept[NoSuchElementException](pool.getAndRemoveContractKey(templateName = "t2"))
    pool.getAndRemoveContractKey("t1") shouldBe textValue("1")
    // A known but depleted template fails with IndexOutOfBoundsException instead.
    intercept[IndexOutOfBoundsException](pool.getAndRemoveContractKey(templateName = "t1"))
    pool.addContractKey(templateName = "t1", key = textValue("1"))
    pool.addContractKey(templateName = "t1", key = textValue("2"))
    pool.addContractKey(templateName = "t1", key = textValue("3"))
    pool.addContractKey(templateName = "t2", key = textValue("1"))
    // With seed 0 the draw order within "t1" is deterministic.
    pool.getAndRemoveContractKey("t1") shouldBe textValue("3")
    pool.getAndRemoveContractKey("t1") shouldBe textValue("1")
    pool.getAndRemoveContractKey("t2") shouldBe textValue("1")
  }

  // Builds a textual ledger Value carrying the given payload.
  private def textValue(payload: String): Value = Value(Value.Sum.Text(payload))
}

View File

@ -1,99 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.client.binding.Primitive
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/** Tests for AllocatedParties.forExistingParties: classifying a flat list of
  * party names into signatory, observers, divulgees, extra submitters and
  * prefix-based observer party sets.
  */
class AllocatedPartiesSpec extends AnyFlatSpec with Matchers {

  it should "apportion parties appropriately" in {
    AllocatedParties.forExistingParties(
      parties = List(
        "signatory-123",
        "Obs-0",
        "Obs-1",
        "Div-0",
        "Sub-0",
        "FooParty-0",
        "FooParty-1",
        "BarParty-100",
        "MyOtherParty-0",
      ),
      partyPrefixesForPartySets = List("FooParty", "BarParty"),
    ) shouldBe AllocatedParties(
      // Note: "MyOtherParty-0" matches no known prefix and is dropped.
      signatoryO = Some(Primitive.Party("signatory-123")),
      observers = List(
        Primitive.Party("Obs-0"),
        Primitive.Party("Obs-1"),
      ),
      divulgees = List(Primitive.Party("Div-0")),
      extraSubmitters = List(Primitive.Party("Sub-0")),
      observerPartySets = List(
        AllocatedPartySet(
          mainPartyNamePrefix = "FooParty",
          parties = List(Primitive.Party("FooParty-0"), Primitive.Party("FooParty-1")),
        ),
        AllocatedPartySet(
          mainPartyNamePrefix = "BarParty",
          parties = List(Primitive.Party("BarParty-100")),
        ),
      ),
    )
  }

  it should "apportion parties appropriately - minimal" in {
    // A single signatory and no party sets yields an otherwise-empty result.
    AllocatedParties.forExistingParties(
      parties = List(
        "signatory-123"
      ),
      partyPrefixesForPartySets = List.empty,
    ) shouldBe AllocatedParties(
      signatoryO = Some(Primitive.Party("signatory-123")),
      observers = List.empty,
      divulgees = List.empty,
      extraSubmitters = List.empty,
      observerPartySets = List.empty,
    )
  }

  it should "find party sets for any party prefix" in {
    // Prefixes may differ from the derived set name: "Party-0", "Foo-" and
    // "Bar" all collapse to sets named by the text before the dash.
    AllocatedParties.forExistingParties(
      parties = List(
        "Party-01",
        "Party-02",
        "Party-10",
        "Foo-01",
        "Bar-02",
        "Baz-03",
      ),
      partyPrefixesForPartySets = List("Party-0", "Foo-", "Bar"),
    ) shouldBe AllocatedParties(
      signatoryO = None,
      observers = List.empty,
      divulgees = List.empty,
      extraSubmitters = List.empty,
      observerPartySets = List(
        AllocatedPartySet(
          "Party",
          parties = List(
            Primitive.Party("Party-01"),
            Primitive.Party("Party-02"),
            Primitive.Party("Party-10"),
          ),
        ),
        AllocatedPartySet(
          "Foo",
          parties = List(Primitive.Party("Foo-01")),
        ),
        AllocatedPartySet(
          "Bar",
          parties = List(Primitive.Party("Bar-02")),
        ),
      ),
    )
  }
}

View File

@ -1,25 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com
package daml.ledger.api.benchtool.submission
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/** Tests for DepletingUniformRandomPool with a fixed seed: deterministic
  * draw order and failure on popping an empty pool.
  */
class DepletingUniformRandomPoolSpec extends AnyFlatSpec with Matchers {

  it should "put and pop from a pool" in {
    val pool = new DepletingUniformRandomPool[Int](RandomnessProvider.forSeed(0))
    // Popping an empty pool must fail.
    intercept[IndexOutOfBoundsException](pool.pop())
    pool.put(1)
    pool.pop() shouldBe 1
    // With seed 0, three queued elements come back in this fixed order.
    pool.put(1)
    pool.put(2)
    pool.put(3)
    pool.pop() shouldBe 3
    pool.pop() shouldBe 1
    pool.pop() shouldBe 2
    // The pool is depleted again.
    intercept[IndexOutOfBoundsException](pool.pop())
  }
}

View File

@ -1,52 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import org.scalacheck.Gen
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
/** Property-based tests for Distribution: weighted selection of an item from
  * an indexed sequence, driven by a uniform double in [0, 1).
  */
class DistributionSpec extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks {
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 100)

  "Distribution" should {
    val MaxValue = 1000000
    // Strictly positive weights.
    val smallInt = Gen.choose(1, MaxValue)
    // Uniform double in [0, 1) (Int.MaxValue - 1 keeps it strictly below 1).
    val zeroToOneDouble: Gen[Double] =
      Gen.choose(0, Int.MaxValue - 1).map(_.toDouble / Int.MaxValue)
    // Between 1 and 50 positive weights.
    val listOfWeights: Gen[List[Int]] = Gen.choose(1, 50).flatMap(Gen.listOfN(_, smallInt))

    "handle single-element list" in {
      val cases: Gen[(Int, Double)] = for {
        weight <- smallInt
        double <- zeroToOneDouble
      } yield (weight, double)
      forAll(cases) { case (weight, d) =>
        // With one item, every draw must return that item regardless of d.
        val sentinel = new Object()
        val distribution = new Distribution[Object](List(weight), IndexedSeq(sentinel))
        distribution.choose(d) shouldBe sentinel
      }
    }

    "handle multi-element list" in {
      val cases = for {
        double <- zeroToOneDouble
        weights <- listOfWeights
      } yield (weights, double)
      forAll(cases) { case (weights, d) =>
        val distribution = new Distribution[Int](weights, items = weights.toIndexedSeq)
        val index = distribution.index(d)
        // Invariant: d lies within the chosen index's normalized cumulative
        // weight interval [sum(w_0..w_{i-1}), sum(w_0..w_i)).
        val totalWeight = weights.map(_.toLong).sum
        weights.take(index).map(_.toDouble / totalWeight).sum should be <= d
        weights.take(index + 1).map(_.toDouble / totalWeight).sum should be > d
      }
    }
  }
}

View File

@ -1,79 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.BenchtoolSandboxFixture
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.testing.utils.SuiteResourceManagementAroundAll
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import org.scalatest.AppendedClues
import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.matchers.should.Matchers
/** Integration test: submits Fibonacci workflow commands through the benchtool
  * submitter against a sandbox and verifies the resulting create events via a
  * transaction-tree stream.
  */
class FibonacciCommandSubmitterITSpec
    extends AsyncFlatSpec
    with BenchtoolSandboxFixture
    with SuiteResourceManagementAroundAll
    with Matchers
    with AppendedClues {

  it should "populate create fibonacci contracts" in {
    val config = WorkflowConfig.FibonacciSubmissionConfig(
      numberOfInstances = 10,
      uniqueParties = false,
      value = 7,
      waitForSubmission = true,
    )
    for {
      (apiServices, names, submitter) <- benchtoolFixture()
      allocatedParties <- submitter.prepare(config)
      // Fibonacci submission allocates no divulgee parties.
      _ = allocatedParties.divulgees shouldBe empty
      generator = new FibonacciCommandGenerator(
        signatory = allocatedParties.signatory,
        config = config,
        names = names,
      )
      _ <- submitter.generateAndSubmit(
        generator = generator,
        config = config,
        baseActAs = List(allocatedParties.signatory) ++ allocatedParties.divulgees,
        maxInFlightCommands = 1,
        submissionBatchSize = 5,
      )
      // Collects create events for the two Fibonacci template names.
      eventsObserver = TreeEventsObserver(expectedTemplateNames =
        Set(
          "InefficientFibonacci",
          "InefficientFibonacciResult",
        )
      )
      // Streams transaction trees for the signatory from the start of the
      // ledger up to the current ledger end.
      _ <- apiServices.transactionService.transactionTrees(
        config = WorkflowConfig.StreamConfig.TransactionTreesStreamConfig(
          name = "dummy-name",
          filters = List(
            WorkflowConfig.StreamConfig.PartyFilter(
              party = allocatedParties.signatory.toString,
              templates = List.empty,
              interfaces = List.empty,
            )
          ),
          beginOffset = None,
          endOffset = Some(LedgerOffset().withBoundary(LedgerOffset.LedgerBoundary.LEDGER_END)),
          objectives = None,
          maxItemCount = None,
          timeoutO = None,
        ),
        observer = eventsObserver,
      )
      observerResult <- eventsObserver.result
    } yield {
      // One create event expected per requested contract instance.
      observerResult.numberOfCreatesPerTemplateName(
        "InefficientFibonacci"
      ) shouldBe config.numberOfInstances withClue ("number of create events")
      succeed
    }
  }
}

View File

@ -1,20 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import java.nio.charset.StandardCharsets
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/** Checks that FooCommandGenerator produces payloads of the requested byte size. */
class FooCommandGeneratorSpec extends AnyFlatSpec with Matchers {

  it should "generate random payload of a given size" in {
    val requestedSize = 100
    val payload = FooCommandGenerator.randomPayload(RandomnessProvider.Default, sizeBytes = requestedSize)
    // The size contract is expressed in UTF-8 encoded bytes.
    payload.getBytes(StandardCharsets.UTF_8).length shouldBe requestedSize
  }
}

View File

@ -1,168 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.BenchtoolSandboxFixture
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.config.WorkflowConfig.FooSubmissionConfig.{
ConsumingExercises,
NonconsumingExercises,
}
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.api.testing.utils.SuiteResourceManagementAroundAll
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import com.daml.ledger.client.binding
import com.daml.scalautil.Statement.discard
import org.scalatest.{AppendedClues, Checkpoints}
import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.concurrent.Future
/** Integration test for the Foo command submitter: populates a sandbox
  * participant with create, consuming and non-consuming exercise events, then
  * verifies the event counts and payload sizes observed by the signatory and
  * by each observer party.
  */
class FooCommandSubmitterITSpec
extends AsyncFlatSpec
with BenchtoolSandboxFixture
with SuiteResourceManagementAroundAll
with Matchers
with AppendedClues
with Checkpoints {
it should "populate participant with create, consuming and non consuming exercises" in {
// Two equally-weighted contract templates, each with a 100-byte payload.
val foo1Config = WorkflowConfig.FooSubmissionConfig.ContractDescription(
template = "Foo1",
weight = 1,
payloadSizeBytes = 100,
)
val foo2Config = WorkflowConfig.FooSubmissionConfig.ContractDescription(
template = "Foo2",
weight = 1,
payloadSizeBytes = 100,
)
// probability = 1.0: every created contract is consumed exactly once.
val consumingExercisesConfig = ConsumingExercises(
probability = 1.0,
payloadSizeBytes = 100,
)
// probability = 2.0 — presumably "two non-consuming exercises per contract";
// the expected-count formula below relies on that reading (TODO confirm).
val nonConsumingExercisesConfig = NonconsumingExercises(
probability = 2.0,
payloadSizeBytes = 100,
)
val config = WorkflowConfig.FooSubmissionConfig(
numberOfInstances = 100,
numberOfObservers = 2,
numberOfDivulgees = 0,
numberOfExtraSubmitters = 0,
uniqueParties = false,
instanceDistribution = List(
foo1Config,
foo2Config,
),
nonConsumingExercises = Some(nonConsumingExercisesConfig),
consumingExercises = Some(consumingExercisesConfig),
applicationIds = List.empty,
)
for {
// Fixture provides a running ledger, the allocated parties and a submitter.
(apiServices, allocatedParties, fooSubmission) <- benchtoolFooSubmissionFixture(config)
// numberOfDivulgees = 0 above, so no divulgee parties may exist.
_ = allocatedParties.divulgees shouldBe empty
_ <- fooSubmission.performSubmission(submissionConfig = config)
// Collect the transaction-tree streams seen by the signatory and both observers.
observerResult_signatory: ObservedEvents <- treeEventsObserver(
apiServices = apiServices,
party = allocatedParties.signatory,
)
observerResult_observer0: ObservedEvents <- treeEventsObserver(
apiServices = apiServices,
party = allocatedParties.observers(0),
)
observerResult_observer1: ObservedEvents <- treeEventsObserver(
apiServices = apiServices,
party = allocatedParties.observers(1),
)
} yield {
// Checkpoint collects all failures and reports them together at reportAll().
val cp = new Checkpoint
cp(
discard(
observerResult_signatory.createEvents.size shouldBe config.numberOfInstances withClue ("number of create events")
)
)
// 162 ± 20: observed on-wire create-event size for a 100-byte payload
// (payload plus record/field overhead — exact breakdown not visible here).
cp(
discard(
observerResult_signatory.avgSizeOfCreateEventPerTemplateName(
"Foo1"
) shouldBe 162 +- 20 withClue ("payload size of create Foo1")
)
)
cp(
discard(
observerResult_signatory.avgSizeOfCreateEventPerTemplateName(
"Foo2"
) shouldBe 162 +- 20 withClue ("payload size of create Foo2")
)
)
cp(
discard(
observerResult_signatory.avgSizeOfConsumingExercise shouldBe 108
)
)
cp(
discard(
observerResult_signatory.avgSizeOfNonconsumingExercise shouldBe 108
)
)
// With probability 1.0 the expected consuming-exercise count equals the instance count.
cp(
discard(
observerResult_signatory.consumingExercises.size.toDouble shouldBe (config.numberOfInstances * consumingExercisesConfig.probability) withClue ("number of consuming exercises")
)
)
// NOTE(review): this groups as numberOfInstances * (probability.toInt), i.e.
// 100 * 2 = 200 here; for fractional probabilities it would differ from
// (numberOfInstances * probability).toInt — intent assumed, confirm if reused.
val expectedNumberOfNonConsumingExercises =
config.numberOfInstances * nonConsumingExercisesConfig.probability.toInt
cp(
discard(
observerResult_signatory.nonConsumingExercises.size shouldBe expectedNumberOfNonConsumingExercises withClue ("number of non consuming exercises visible to signatory")
)
)
// First observer can see all non-consuming events
cp(
discard(
observerResult_observer0.nonConsumingExercises.size shouldBe expectedNumberOfNonConsumingExercises withClue ("number of non consuming exercises visible to Obs-0")
)
)
// Second observer can see ~10% of all non-consuming events
// (14 is a fixed expected value; presumably deterministic because the
// submission randomness is seeded — TODO confirm).
cp(
discard(
observerResult_observer1.nonConsumingExercises.size shouldBe 14 withClue ("number of non consuming exercises visible to Obs-1")
)
)
cp.reportAll()
succeed
}
}
/** Streams all transaction trees visible to `party` from the beginning of the
  * ledger to the current ledger end, collecting events for the Foo1/Foo2
  * templates into an [[ObservedEvents]] summary.
  */
private def treeEventsObserver(
apiServices: LedgerApiServices,
party: binding.Primitive.Party,
): Future[ObservedEvents] = {
val eventsObserver = TreeEventsObserver(expectedTemplateNames = Set("Foo1", "Foo2"))
val config = WorkflowConfig.StreamConfig.TransactionTreesStreamConfig(
name = "dummy-name",
filters = List(
WorkflowConfig.StreamConfig.PartyFilter(
party = party.toString,
templates = List.empty,
interfaces = List.empty,
)
),
// beginOffset = None: stream from the ledger beginning; stop at LEDGER_END.
beginOffset = None,
endOffset = Some(LedgerOffset().withBoundary(LedgerOffset.LedgerBoundary.LEDGER_END)),
objectives = None,
maxItemCount = None,
timeoutO = None,
)
apiServices.transactionService.transactionTrees(
config = config,
observer = eventsObserver,
)
}
}

View File

@ -1,108 +0,0 @@
// Copyright (c) 2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.benchtool.submission
import com.daml.ledger.api.benchtool.{BenchtoolSandboxFixture, ConfigEnricher}
import com.daml.ledger.api.benchtool.config.WorkflowConfig
import com.daml.ledger.api.benchtool.services.LedgerApiServices
import com.daml.ledger.api.testing.utils.SuiteResourceManagementAroundAll
import com.daml.ledger.client.binding
import org.scalatest.AppendedClues
import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.concurrent.Future
/** Integration test verifying that active-contract subscriptions by interface
  * (FooI1/FooI2/FooI3) deliver non-empty, non-trivially-sized interface views
  * for every created Foo contract.
  */
class InterfaceSubscriptionITSpec
    extends AsyncFlatSpec
    with BenchtoolSandboxFixture
    with SuiteResourceManagementAroundAll
    with Matchers
    with AppendedClues {

  it should "make interface subscriptions exposed to the benchtool" in {
    // One description per Foo template, all equally weighted with a 100-byte payload.
    def describe(templateName: String): WorkflowConfig.FooSubmissionConfig.ContractDescription =
      WorkflowConfig.FooSubmissionConfig.ContractDescription(
        template = templateName,
        weight = 1,
        payloadSizeBytes = 100,
      )

    val submissionConfig = WorkflowConfig.FooSubmissionConfig(
      numberOfInstances = 100,
      numberOfObservers = 2,
      numberOfDivulgees = 0,
      numberOfExtraSubmitters = 0,
      uniqueParties = false,
      instanceDistribution = List(
        describe("Foo1"),
        describe("Foo2"),
        describe("Foo3"),
      ),
      nonConsumingExercises = None,
      consumingExercises = None,
      applicationIds = List.empty,
    )

    for {
      (apiServices, allocatedParties, fooSubmission) <- benchtoolFooSubmissionFixture(
        submissionConfig
      )
      enricher = new ConfigEnricher(
        allocatedParties,
        BenchtoolTestsPackageInfo.StaticDefault,
      )
      _ <- fooSubmission.performSubmission(submissionConfig = submissionConfig)
      observed <- observer(
        configDesugaring = enricher,
        apiServices = apiServices,
        party = allocatedParties.signatory,
      )
    } yield {
      val interfaceViews = observed.createEvents.flatMap(_.interfaceViews)
      // Every create event must expose at least one interface view ...
      observed.createEvents.forall(_.interfaceViews.nonEmpty) shouldBe true
      // ... each view must carry actual serialized content ...
      interfaceViews.forall(_.serializedSize > 0) shouldBe true
      // ... and all three Foo interfaces must be represented.
      interfaceViews.map(_.interfaceName).toSet shouldBe Set("FooI2", "FooI1", "FooI3")
    }
  }

  /** Subscribes to the active-contracts stream for `party`, filtered to the
    * three Foo interfaces, and collects the delivered events.
    */
  private def observer(
      configDesugaring: ConfigEnricher,
      apiServices: LedgerApiServices,
      party: binding.Primitive.Party,
  ): Future[ObservedEvents] = {
    val streamConfig = WorkflowConfig.StreamConfig.ActiveContractsStreamConfig(
      name = "dummy-name",
      filters = List(
        WorkflowConfig.StreamConfig.PartyFilter(
          party = party.toString,
          templates = List.empty,
          interfaces = List("FooI2", "FooI1", "FooI3"),
        )
      ),
      objectives = None,
      maxItemCount = None,
      timeoutO = None,
    )
    // Enrichment resolves the short interface names; the result is known to stay
    // an ActiveContractsStreamConfig, hence the narrowing cast.
    val enrichedConfig = configDesugaring
      .enrichStreamConfig(streamConfig)
      .asInstanceOf[WorkflowConfig.StreamConfig.ActiveContractsStreamConfig]
    apiServices.activeContractsService.getActiveContracts(
      config = enrichedConfig,
      observer = ActiveContractsObserver(Set("Foo1", "Foo2", "Foo3")),
    )
  }
}

Some files were not shown because too many files have changed in this diff Show More