update canton to 20240606.13448.v43783c88 (#19344)

* update canton to 20240606.13448.v43783c88

tell-slack: canton

* Commit to cherry-pick.

---------

Co-authored-by: Azure Pipelines Daml Build <support@digitalasset.com>
Co-authored-by: Andreas Triantafyllos <andreas.triantafyllos@digitalasset.com>
azure-pipelines[bot] 2024-06-10 00:03:29 +02:00 committed by GitHub
parent ebd948e3f2
commit ce50fa26b1
85 changed files with 1403 additions and 1022 deletions

View File

@ -23,10 +23,11 @@ service PackageService {
// Upload a DAR file and all packages inside to the participant node
rpc UploadDar(UploadDarRequest) returns (UploadDarResponse);
// Upload a DAR file and all packages inside to the participant node
// Validate a DAR file and all packages inside to the participant node
rpc ValidateDar(ValidateDarRequest) returns (ValidateDarResponse);
// Remove a package that is not vetted
// TODO(#17635): This operation is not safe as it can remove packages still in use by the Ledger API
rpc RemovePackage(RemovePackageRequest) returns (RemovePackageResponse);
// Enable vetting for all packages pertaining to a specific DAR
@ -38,6 +39,7 @@ service PackageService {
rpc UnvetDar(UnvetDarRequest) returns (UnvetDarResponse);
// Remove a DAR that is not needed
// TODO(#17635): This operation is not safe as it can remove packages still in use by the Ledger API
rpc RemoveDar(RemoveDarRequest) returns (RemoveDarResponse);
// Obtain a DAR file by hash -- for inspection & download

View File

@ -184,44 +184,6 @@ object EnterpriseSequencerAdminCommands {
override def timeoutType: TimeoutType = DefaultUnboundedTimeout
}
final case class GenesisState(
timestamp: Option[CantonTimestamp]
) extends BaseSequencerAdministrationCommand[
v30.GenesisStateRequest,
v30.GenesisStateResponse,
ByteString,
] {
override def createRequest(): Either[String, v30.GenesisStateRequest] =
Right(
v30.GenesisStateRequest(
timestamp = timestamp.map(_.toProtoTimestamp)
)
)
override def submitRequest(
service: v30.SequencerAdministrationServiceGrpc.SequencerAdministrationServiceStub,
request: v30.GenesisStateRequest,
): Future[v30.GenesisStateResponse] = service.genesisState(request)
override def handleResponse(
response: v30.GenesisStateResponse
): Either[String, ByteString] =
response.value match {
case v30.GenesisStateResponse.Value
.Failure(v30.GenesisStateResponse.Failure(reason)) =>
Left(reason)
case v30.GenesisStateResponse.Value
.Success(
v30.GenesisStateResponse.Success(genesisState)
) =>
Right(genesisState)
case _ => Left("response is empty")
}
// command will potentially take a long time
override def timeoutType: TimeoutType = DefaultUnboundedTimeout
}
final case class Prune(timestamp: CantonTimestamp)
extends BaseSequencerPruningAdministrationCommand[
v30.SequencerPruning.PruneRequest,

View File

@ -99,9 +99,10 @@ import com.daml.ledger.api.v2.testing.time_service.{
TimeServiceGrpc,
}
import com.daml.ledger.api.v2.transaction.{Transaction, TransactionTree}
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.{
CumulativeFilter,
Filters,
InclusiveFilters,
TemplateFilter,
TransactionFilter,
}
@ -1408,9 +1409,9 @@ object LedgerApiCommands {
val filter =
if (templateFilter.nonEmpty) {
Filters(
Some(
InclusiveFilters(templateFilters =
templateFilter.map(tId =>
templateFilter.map(tId =>
CumulativeFilter(
IdentifierFilter.TemplateFilter(
TemplateFilter(Some(tId.toIdentifier), includeCreatedEventBlob)
)
)
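For orientation, a minimal sketch of the new filter shape used in the hunk above, assuming the scalapb classes generated from the updated transaction_filter.proto in this change; the package id, template name, and party are placeholders.

import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.{
  CumulativeFilter,
  Filters,
  TemplateFilter,
  TransactionFilter,
}
import com.daml.ledger.api.v2.value.Identifier

// Placeholder template identifier (package id, module, entity).
val iou = Identifier("somePackageId", "Iou", "Iou")

// One CumulativeFilter entry per template; the repeated list replaces the old
// InclusiveFilters wrapper.
val filters = Filters(
  Seq(
    CumulativeFilter(
      IdentifierFilter.TemplateFilter(
        TemplateFilter(Some(iou), includeCreatedEventBlob = false)
      )
    )
  )
)

// Placeholder party; the remaining TransactionFilter fields keep their defaults.
val byParty = TransactionFilter(filtersByParty = Map("Alice" -> filters))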

View File

@ -13,8 +13,10 @@ import com.digitalasset.canton.admin.api.client.data.*
import com.digitalasset.canton.admin.api.client.data.topology.*
import com.digitalasset.canton.config.RequireTypes.PositiveInt
import com.digitalasset.canton.crypto.Fingerprint
import com.digitalasset.canton.data.CantonTimestamp
import com.digitalasset.canton.topology.*
import com.digitalasset.canton.topology.admin.grpc.BaseQuery
import com.digitalasset.canton.topology.admin.grpc.TopologyStore.Domain
import com.digitalasset.canton.topology.admin.v30
import com.digitalasset.canton.topology.admin.v30.AuthorizeRequest.Type.{Proposal, TransactionHash}
import com.digitalasset.canton.topology.admin.v30.IdentityInitializationServiceGrpc.IdentityInitializationServiceStub
@ -567,6 +569,39 @@ object TopologyAdminCommands {
): Either[String, ByteString] =
Right(response.result)
}
final case class GenesisState(
filterDomainStore: Option[String],
timestamp: Option[CantonTimestamp],
) extends BaseCommand[
v30.GenesisStateRequest,
v30.GenesisStateResponse,
ByteString,
] {
override def createRequest(): Either[String, v30.GenesisStateRequest] = {
val domainStore = filterDomainStore.traverse(DomainId.fromString)
domainStore.flatMap(domainId =>
Right(
v30.GenesisStateRequest(
domainId.map(Domain).map(_.toProto),
timestamp.map(_.toProtoTimestamp),
)
)
)
}
override def submitRequest(
service: TopologyManagerReadServiceStub,
request: v30.GenesisStateRequest,
): Future[v30.GenesisStateResponse] = service.genesisState(request)
override def handleResponse(
response: v30.GenesisStateResponse
): Either[String, ByteString] =
Right(response.genesisStateForSequencer)
// command will potentially take a long time
override def timeoutType: TimeoutType = DefaultUnboundedTimeout
}
}
object Aggregation {

View File

@ -31,6 +31,7 @@ import com.digitalasset.canton.config.ConfigErrors.{
SubstitutionError,
}
import com.digitalasset.canton.config.InitConfigBase.NodeIdentifierConfig
import com.digitalasset.canton.config.PackageMetadataViewConfig
import com.digitalasset.canton.config.RequireTypes.*
import com.digitalasset.canton.console.{AmmoniteConsoleConfig, FeatureFlag}
import com.digitalasset.canton.crypto.*
@ -75,7 +76,6 @@ import com.digitalasset.canton.platform.apiserver.configuration.{
RateLimitingConfig,
}
import com.digitalasset.canton.platform.config.ActiveContractsServiceStreamsConfig
import com.digitalasset.canton.platform.indexer.PackageMetadataViewConfig
import com.digitalasset.canton.protocol.AcsCommitmentsCatchUpConfig
import com.digitalasset.canton.protocol.DomainParameters.MaxRequestSize
import com.digitalasset.canton.pureconfigutils.HttpServerConfig

View File

@ -40,26 +40,6 @@ class SequencerSetupGroup(node: SequencerReference) extends ConsoleCommandGroup.
}
}
@Help.Summary(
"Download the genesis state for a sequencer. This method should be used when performing major upgrades."
)
@Help.Description(
"""Download the a topology snapshot which includes all the history for major upgrades. The validFrom and validUntil are set the MinValue.ImmediateSuccessor.
|timestamp: If not specified, the max effective time of the latest topology transaction is used. Otherwise, the given timestamp is used.
|""".stripMargin
)
def genesis_state_for_sequencer(
timestamp: Option[CantonTimestamp] = None
): ByteString = {
consoleEnvironment.run {
runner.adminCommand(
EnterpriseSequencerAdminCommands.GenesisState(
timestamp = timestamp
)
)
}
}
@Help.Summary(
"Download the onboarding state for a given sequencer"
)

View File

@ -377,6 +377,31 @@ class TopologyAdministrationGroup(
}
}
@Help.Summary(
"Download the genesis state for a sequencer. This method should be used when performing a major domain upgrade."
)
@Help.Description(
"""Download the topology snapshot which includes the entire history of topology transactions to initialize a sequencer for a major domain upgrades. The validFrom and validUntil are set to SignedTopologyTransaction.InitialTopologySequencingTime.
|filterDomainStore: Must be specified if the genesis state is requested from a participant node.
|timestamp: If not specified, the max effective time of the latest topology transaction is used. Otherwise, the given timestamp is used.
|""".stripMargin
)
def genesis_state(
filterDomainStore: String = "",
timestamp: Option[CantonTimestamp] = None,
): ByteString = {
consoleEnvironment
.run {
adminCommand(
TopologyAdminCommands.Read.GenesisState(
filterDomainStore = OptionUtil
.emptyStringAsNone(filterDomainStore),
timestamp = timestamp,
)
)
}
}
@Help.Summary("Find the latest transaction for a given mapping hash")
@Help.Description(
"""

View File

@ -36,6 +36,9 @@ service TopologyManagerReadService {
rpc ListAvailableStores(ListAvailableStoresRequest) returns (ListAvailableStoresResponse);
rpc ListAll(ListAllRequest) returns (ListAllResponse);
rpc ExportTopologySnapshot(ExportTopologySnapshotRequest) returns (ExportTopologySnapshotResponse);
// Fetch the genesis topology state.
// The returned bytestring can be used directly to initialize a sequencer.
rpc GenesisState(GenesisStateRequest) returns (GenesisStateResponse);
}
message BaseQuery {
@ -297,3 +300,15 @@ message ListAllResponse {
message ExportTopologySnapshotResponse {
bytes result = 1;
}
message GenesisStateRequest {
// Must be specified if the genesis state is requested from a participant node.
Store filter_domain_store = 1;
// Optional - the effective time used to fetch the topology transactions. If not provided the effective time of the last topology transaction is used.
google.protobuf.Timestamp timestamp = 2;
}
message GenesisStateResponse {
// versioned stored topology transactions
bytes genesis_state_for_sequencer = 1;
}
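A sketch of calling the new RPC through the generated Scala stub; the stub value and execution context are assumed to exist in the caller's environment, and both request fields are left at their defaults.

import com.digitalasset.canton.topology.admin.v30
import scala.concurrent.Future

def fetchGenesisState(
    service: v30.TopologyManagerReadServiceGrpc.TopologyManagerReadServiceStub
): Future[v30.GenesisStateResponse] =
  service.genesisState(
    v30.GenesisStateRequest(
      filterDomainStore = None, // only required when the node is a participant
      timestamp = None,         // defaults to the latest topology transaction
    )
  )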

View File

@ -1,9 +1,9 @@
// Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.canton.platform.indexer
package com.digitalasset.canton.config
import com.digitalasset.canton.platform.indexer.PackageMetadataViewConfig.{
import com.digitalasset.canton.config.PackageMetadataViewConfig.{
DefaultInitLoadParallelism,
DefaultInitProcessParallelism,
DefaultInitTakesTooLongInitialDelay,
@ -24,6 +24,4 @@ object PackageMetadataViewConfig {
val DefaultInitProcessParallelism: Int = 16
val DefaultInitTakesTooLongInitialDelay: FiniteDuration = 1.minute
val DefaultInitTakesTooLongInterval: FiniteDuration = 10.seconds
val Default = PackageMetadataViewConfig()
}

View File

@ -878,7 +878,7 @@ object DbStorage {
poolName,
numThreads,
numThreads,
queueSize = config.getIntOr("queueSize", 1000),
queueSize = config.getIntOr("queueSize", 2000),
maxConnections = maxConnections,
registerMbeans = registerMbeans,
logQueryCost = logQueryCost,
@ -893,7 +893,7 @@ object DbStorage {
poolName,
numThreads,
numThreads,
queueSize = config.getIntOr("queueSize", 1000),
queueSize = config.getIntOr("queueSize", 2000),
maxConnections = maxConnections,
registerMbeans = registerMbeans,
)

View File

@ -184,6 +184,21 @@ object TopologyManagerError extends TopologyManagerErrorGroup {
cause = s"Wrong domain $wrong"
)
with TopologyManagerError
final case class InvalidFilterStore(filterStore: String)(implicit
val loggingContext: ErrorLoggingContext
) extends CantonError.Impl(
cause = s"No domain store found for the filter store provided: $filterStore"
)
with TopologyManagerError
final case class MultipleDomainStores(filterStore: String)(implicit
val loggingContext: ErrorLoggingContext
) extends CantonError.Impl(
cause =
s"Multiple domain stores found for the filter store provided: $filterStore. Specify the entire domainID to avoid ambiguity."
)
with TopologyManagerError
}
@Explanation(

View File

@ -9,6 +9,7 @@ import com.daml.ledger.javaapi.data.codegen.ContractTypeCompanion;
import com.daml.ledger.javaapi.data.codegen.InterfaceCompanion;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -30,20 +31,22 @@ public final class ContractFilter<Ct> {
public static <Ct> ContractFilter<Ct> of(ContractCompanion<Ct, ?, ?> companion) {
Filter filter =
new InclusiveFilter(
new CumulativeFilter(
Collections.emptyMap(),
Collections.singletonMap(
companion.TEMPLATE_ID, Filter.Template.HIDE_CREATED_EVENT_BLOB));
companion.TEMPLATE_ID, Filter.Template.HIDE_CREATED_EVENT_BLOB),
Optional.empty());
return new ContractFilter<>(companion, filter);
}
public static <Cid, View> ContractFilter<Contract<Cid, View>> of(
InterfaceCompanion<?, Cid, View> companion) {
Filter filter =
new InclusiveFilter(
new CumulativeFilter(
Collections.singletonMap(
companion.TEMPLATE_ID, Filter.Interface.INCLUDE_VIEW_HIDE_CREATED_EVENT_BLOB),
Collections.emptyMap());
Collections.emptyMap(),
Optional.empty());
return new ContractFilter<>(companion, filter);
}

View File

@ -0,0 +1,161 @@
// Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates.
// Proprietary code. All rights reserved.
package com.daml.ledger.javaapi.data;
import com.daml.ledger.api.v2.TransactionFilterOuterClass;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.checkerframework.checker.nullness.qual.NonNull;
public final class CumulativeFilter extends Filter {
private Map<@NonNull Identifier, Filter.@NonNull Interface> interfaceFilters;
private Map<@NonNull Identifier, Filter.@NonNull Template> templateFilters;
private Optional<Filter.@NonNull Wildcard> wildcardFilter;
public CumulativeFilter(
@NonNull Map<@NonNull Identifier, Filter.@NonNull Interface> interfaceFilters,
@NonNull Map<@NonNull Identifier, Filter.@NonNull Template> templateFilters,
Optional<Filter.@NonNull Wildcard> wildcardFilter) {
this.interfaceFilters = interfaceFilters;
this.templateFilters = templateFilters;
this.wildcardFilter = wildcardFilter;
}
@NonNull
public Map<@NonNull Identifier, Filter.@NonNull Interface> getInterfaceFilters() {
return interfaceFilters;
}
@NonNull
public Map<@NonNull Identifier, Filter.@NonNull Template> getTemplateFilters() {
return templateFilters;
}
@NonNull
public Optional<Filter.@NonNull Wildcard> getWildcardFilter() {
return wildcardFilter;
}
@SuppressWarnings("deprecation")
@Override
public TransactionFilterOuterClass.Filters toProto() {
Iterable<TransactionFilterOuterClass.InterfaceFilter> ifaces =
interfaceFilters.entrySet().stream()
.map(idFilt -> idFilt.getValue().toProto(idFilt.getKey()))
.collect(Collectors.toUnmodifiableList());
Iterable<TransactionFilterOuterClass.TemplateFilter> templates =
templateFilters.entrySet().stream()
.map(templateFilter -> templateFilter.getValue().toProto(templateFilter.getKey()))
.collect(Collectors.toUnmodifiableList());
Iterable<TransactionFilterOuterClass.WildcardFilter> wildcard =
wildcardFilter
.map(w -> Collections.singletonList(w.toProto()))
.orElse(Collections.emptyList());
Stream<TransactionFilterOuterClass.CumulativeFilter> cumulativeIfaces =
StreamSupport.stream(ifaces.spliterator(), false)
.map(
ifaceF ->
TransactionFilterOuterClass.CumulativeFilter.newBuilder()
.setInterfaceFilter(ifaceF)
.build());
Stream<TransactionFilterOuterClass.CumulativeFilter> cumulativeTemplates =
StreamSupport.stream(templates.spliterator(), false)
.map(
tempF ->
TransactionFilterOuterClass.CumulativeFilter.newBuilder()
.setTemplateFilter(tempF)
.build());
Stream<TransactionFilterOuterClass.CumulativeFilter> cumulativeWildcard =
StreamSupport.stream(wildcard.spliterator(), false)
.map(
wildF ->
TransactionFilterOuterClass.CumulativeFilter.newBuilder()
.setWildcardFilter(wildF)
.build());
Iterable<TransactionFilterOuterClass.CumulativeFilter> cumulativeFilters =
Stream.concat(Stream.concat(cumulativeIfaces, cumulativeTemplates), cumulativeWildcard)
.collect(Collectors.toUnmodifiableList());
return TransactionFilterOuterClass.Filters.newBuilder()
.addAllCumulative(cumulativeFilters)
.build();
}
@SuppressWarnings("deprecation")
public static CumulativeFilter fromProto(
Iterable<TransactionFilterOuterClass.CumulativeFilter> cumulativeFilters) {
Stream<TransactionFilterOuterClass.InterfaceFilter> intrefaceStream =
StreamSupport.stream(cumulativeFilters.spliterator(), false)
.filter(f -> f.hasInterfaceFilter())
.map(f -> f.getInterfaceFilter());
var interfaceIds =
intrefaceStream.collect(
Collectors.toUnmodifiableMap(
ifFilt -> Identifier.fromProto(ifFilt.getInterfaceId()),
Filter.Interface::fromProto,
Filter.Interface::merge));
Stream<TransactionFilterOuterClass.TemplateFilter> templateStream =
StreamSupport.stream(cumulativeFilters.spliterator(), false)
.filter(f -> f.hasTemplateFilter())
.map(f -> f.getTemplateFilter());
var templateFilters =
templateStream.collect(
Collectors.toUnmodifiableMap(
templateFilter -> Identifier.fromProto(templateFilter.getTemplateId()),
Filter.Template::fromProto,
Filter.Template::merge));
var wildcardFilter =
StreamSupport.stream(cumulativeFilters.spliterator(), false)
.filter(f -> f.hasWildcardFilter())
.map(f -> f.getWildcardFilter())
.map(Filter.Wildcard::fromProto)
.reduce(Filter.Wildcard::merge);
return new CumulativeFilter(interfaceIds, templateFilters, wildcardFilter);
}
@Override
public String toString() {
return "CumulativeFilter{"
+ "interfaceFilters="
+ interfaceFilters
+ ", templateFilters="
+ templateFilters
+ ", wildcardFilter="
+ wildcardFilter
+ '}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CumulativeFilter that = (CumulativeFilter) o;
return Objects.equals(interfaceFilters, that.interfaceFilters)
&& Objects.equals(templateFilters, that.templateFilters)
&& Objects.equals(wildcardFilter, that.wildcardFilter);
}
@Override
public int hashCode() {
return Objects.hash(interfaceFilters, templateFilters, wildcardFilter);
}
}

View File

@ -8,20 +8,20 @@ import com.daml.ledger.api.v2.TransactionFilterOuterClass;
public abstract class Filter {
public static Filter fromProto(TransactionFilterOuterClass.Filters filters) {
if (filters.hasInclusive()) {
return InclusiveFilter.fromProto(filters.getInclusive());
} else {
if (filters.getCumulativeList().isEmpty()) {
return NoFilter.instance;
} else {
return CumulativeFilter.fromProto(filters.getCumulativeList());
}
}
public abstract TransactionFilterOuterClass.Filters toProto();
/**
* Settings for including an interface in {@link InclusiveFilter}. There are four possible values:
* {@link #HIDE_VIEW_HIDE_CREATED_EVENT_BLOB} and {@link #INCLUDE_VIEW_HIDE_CREATED_EVENT_BLOB}
* and {@link #HIDE_VIEW_INCLUDE_CREATED_EVENT_BLOB} and {@link
* #INCLUDE_VIEW_INCLUDE_CREATED_EVENT_BLOB}.
* Settings for including an interface in {@link CumulativeFilter}. There are four possible
* values: {@link #HIDE_VIEW_HIDE_CREATED_EVENT_BLOB} and {@link
* #INCLUDE_VIEW_HIDE_CREATED_EVENT_BLOB} and {@link #HIDE_VIEW_INCLUDE_CREATED_EVENT_BLOB} and
* {@link #INCLUDE_VIEW_INCLUDE_CREATED_EVENT_BLOB}.
*/
public static enum Interface {
HIDE_VIEW_HIDE_CREATED_EVENT_BLOB(false, false),
@ -94,4 +94,32 @@ public abstract class Filter {
return includeCreatedEventBlob(includeCreatedEventBlob || other.includeCreatedEventBlob);
}
}
public static enum Wildcard {
INCLUDE_CREATED_EVENT_BLOB(true),
HIDE_CREATED_EVENT_BLOB(false);
public final boolean includeCreatedEventBlob;
Wildcard(boolean includeCreatedEventBlob) {
this.includeCreatedEventBlob = includeCreatedEventBlob;
}
private static Wildcard includeCreatedEventBlob(boolean includeCreatedEventBlob) {
return includeCreatedEventBlob ? INCLUDE_CREATED_EVENT_BLOB : HIDE_CREATED_EVENT_BLOB;
}
public TransactionFilterOuterClass.WildcardFilter toProto() {
return TransactionFilterOuterClass.WildcardFilter.newBuilder()
.setIncludeCreatedEventBlob(includeCreatedEventBlob)
.build();
}
static Wildcard fromProto(TransactionFilterOuterClass.WildcardFilter proto) {
return includeCreatedEventBlob(proto.getIncludeCreatedEventBlob());
}
Wildcard merge(Wildcard other) {
return includeCreatedEventBlob(includeCreatedEventBlob || other.includeCreatedEventBlob);
}
}
}
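A small sketch using the new bindings, written in Scala for consistency with the other examples here: a filter that matches every template and also requests the created event blob via the Wildcard setting above.

import com.daml.ledger.javaapi.data.{CumulativeFilter, Filter, Identifier}
import java.util.{Collections, Optional}

// No interface or template filters, only a wildcard entry that asks for the blob.
val wildcardOnly: Filter = new CumulativeFilter(
  Collections.emptyMap[Identifier, Filter.Interface](),
  Collections.emptyMap[Identifier, Filter.Template](),
  Optional.of(Filter.Wildcard.INCLUDE_CREATED_EVENT_BLOB),
)
// Produces a v2 Filters message with a single cumulative WildcardFilter entry.
val protoFilters = wildcardOnly.toProto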

View File

@ -1,102 +0,0 @@
// Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates.
// Proprietary code. All rights reserved.
package com.daml.ledger.javaapi.data;
import com.daml.ledger.api.v2.TransactionFilterOuterClass;
import com.daml.ledger.api.v2.ValueOuterClass;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.checkerframework.checker.nullness.qual.NonNull;
public final class InclusiveFilter extends Filter {
private Map<@NonNull Identifier, Filter.@NonNull Interface> interfaceFilters;
private Map<@NonNull Identifier, Filter.@NonNull Template> templateFilters;
public InclusiveFilter(
@NonNull Map<@NonNull Identifier, Filter.@NonNull Interface> interfaceFilters,
@NonNull Map<@NonNull Identifier, Filter.@NonNull Template> templateFilters) {
this.interfaceFilters = interfaceFilters;
this.templateFilters = templateFilters;
}
@NonNull
public Map<@NonNull Identifier, Filter.@NonNull Interface> getInterfaceFilters() {
return interfaceFilters;
}
@NonNull
public Map<@NonNull Identifier, Filter.@NonNull Template> getTemplateFilters() {
return templateFilters;
}
@SuppressWarnings("deprecation")
@Override
public TransactionFilterOuterClass.Filters toProto() {
TransactionFilterOuterClass.InclusiveFilters inclusiveFilter =
TransactionFilterOuterClass.InclusiveFilters.newBuilder()
.addAllInterfaceFilters(
interfaceFilters.entrySet().stream()
.map(idFilt -> idFilt.getValue().toProto(idFilt.getKey()))
.collect(Collectors.toUnmodifiableList()))
.addAllTemplateFilters(
templateFilters.entrySet().stream()
.map(
templateFilter ->
templateFilter.getValue().toProto(templateFilter.getKey()))
.collect(Collectors.toUnmodifiableList()))
.build();
return TransactionFilterOuterClass.Filters.newBuilder().setInclusive(inclusiveFilter).build();
}
@SuppressWarnings("deprecation")
public static InclusiveFilter fromProto(
TransactionFilterOuterClass.InclusiveFilters inclusiveFilters) {
var interfaceIds =
inclusiveFilters.getInterfaceFiltersList().stream()
.collect(
Collectors.toUnmodifiableMap(
ifFilt -> Identifier.fromProto(ifFilt.getInterfaceId()),
Filter.Interface::fromProto,
Filter.Interface::merge));
var templateFilters =
inclusiveFilters.getTemplateFiltersList().stream()
.collect(
Collectors.toUnmodifiableMap(
templateFilter -> Identifier.fromProto(templateFilter.getTemplateId()),
Filter.Template::fromProto,
Filter.Template::merge));
return new InclusiveFilter(interfaceIds, templateFilters);
}
@Override
public String toString() {
return "InclusiveFilter{"
+ "interfaceFilters="
+ interfaceFilters
+ ", templateFilters="
+ templateFilters
+ '}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
InclusiveFilter that = (InclusiveFilter) o;
return Objects.equals(interfaceFilters, that.interfaceFilters)
&& Objects.equals(templateFilters, that.templateFilters);
}
@Override
public int hashCode() {
return Objects.hash(interfaceFilters, templateFilters);
}
}

View File

@ -9,6 +9,7 @@ import com.daml.ledger.javaapi.data.codegen.ContractTypeCompanion;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -17,7 +18,8 @@ public abstract class TransactionFilter {
public static TransactionFilter fromProto(
TransactionFilterOuterClass.TransactionFilter transactionFilter) {
// at the moment, the only transaction filter supported is FiltersByParty
// at the moment, the only transaction filter supported is FiltersByParty // TODO(#19364)
// support FiltersForAnyParty
return FiltersByParty.fromProto(transactionFilter);
}
@ -29,15 +31,17 @@ public abstract class TransactionFilter {
ContractTypeCompanion<?, ?, ?, ?> contractCompanion, Set<String> parties) {
Filter filter =
(contractCompanion instanceof ContractCompanion)
? new InclusiveFilter(
? new CumulativeFilter(
Collections.emptyMap(),
Collections.singletonMap(
contractCompanion.TEMPLATE_ID, Filter.Template.HIDE_CREATED_EVENT_BLOB))
: new InclusiveFilter(
contractCompanion.TEMPLATE_ID, Filter.Template.HIDE_CREATED_EVENT_BLOB),
Optional.empty())
: new CumulativeFilter(
Map.of(
contractCompanion.TEMPLATE_ID,
Filter.Interface.INCLUDE_VIEW_HIDE_CREATED_EVENT_BLOB),
Collections.emptyMap());
Collections.emptyMap(),
Optional.empty());
Map<String, Filter> partyToFilters =
parties.stream().collect(Collectors.toMap(Function.identity(), x -> filter));
return new FiltersByParty(partyToFilters);

View File

@ -4,7 +4,6 @@
package com.daml.ledger.javaapi.data
import com.daml.ledger.api.*
import com.daml.ledger.api.v2.TransactionFilterOuterClass
import com.google.protobuf.{ByteString, Empty}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Gen}
@ -335,29 +334,48 @@ object Generators {
def filtersGen: Gen[v2.TransactionFilterOuterClass.Filters] =
for {
inclusive <- inclusiveGen
cumulatives <- cumulativeGen
} yield v2.TransactionFilterOuterClass.Filters
.newBuilder()
.setInclusive(inclusive)
.addAllCumulative(cumulatives.asJava)
.build()
def inclusiveGen: Gen[v2.TransactionFilterOuterClass.InclusiveFilters] =
def cumulativeGen: Gen[List[v2.TransactionFilterOuterClass.CumulativeFilter]] =
for {
templateIds <- Gen.listOf(identifierGen)
interfaceFilters <- Gen.listOf(interfaceFilterGen)
} yield v2.TransactionFilterOuterClass.InclusiveFilters
.newBuilder()
.addAllTemplateFilters(
templateIds
.map(templateId =>
TransactionFilterOuterClass.TemplateFilter.newBuilder
.setTemplateId(templateId)
.build
)
.asJava
)
.addAllInterfaceFilters(interfaceFilters.asJava)
.build()
wildcardFilterO <- Gen.option(wildcardFilterGen)
} yield {
templateIds
.map(templateId =>
v2.TransactionFilterOuterClass.CumulativeFilter
.newBuilder()
.setTemplateFilter(
v2.TransactionFilterOuterClass.TemplateFilter.newBuilder
.setTemplateId(templateId)
.build
)
.build()
)
++
interfaceFilters
.map(interfaceFilter =>
v2.TransactionFilterOuterClass.CumulativeFilter
.newBuilder()
.setInterfaceFilter(interfaceFilter)
.build()
)
++ (wildcardFilterO match {
case Some(wildcardFilter) =>
Seq(
v2.TransactionFilterOuterClass.CumulativeFilter
.newBuilder()
.setWildcardFilter(wildcardFilter)
.build()
)
case None => Seq.empty
})
}
private[this] def interfaceFilterGen: Gen[v2.TransactionFilterOuterClass.InterfaceFilter] =
Gen.zip(identifierGen, arbitrary[Boolean]).map { case (interfaceId, includeInterfaceView) =>
@ -368,6 +386,14 @@ object Generators {
.build()
}
private[this] def wildcardFilterGen: Gen[v2.TransactionFilterOuterClass.WildcardFilter] =
arbitrary[Boolean].map { includeBlob =>
v2.TransactionFilterOuterClass.WildcardFilter
.newBuilder()
.setIncludeCreatedEventBlob(includeBlob)
.build()
}
def getActiveContractRequestGen: Gen[v2.StateServiceOuterClass.GetActiveContractsRequest] =
for {
transactionFilter <- transactionFilterGen

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: CantonExamples

View File

@ -584,6 +584,7 @@ abstract class CantonNodeBootstrapImpl[
adminV30.TopologyManagerReadServiceGrpc
.bindService(
new GrpcTopologyManagerReadService(
member(nodeId),
sequencedTopologyStores :+ authorizedStore,
crypto,
lookupTopologyClient,

View File

@ -5,7 +5,7 @@ package com.digitalasset.canton.protocol
import com.daml.lf.data.Ref.PackageId
import com.digitalasset.canton.LfPackageId
import com.digitalasset.canton.config.CantonRequireTypes.String256M
import com.digitalasset.canton.config.CantonRequireTypes.String255
import com.digitalasset.canton.data.CantonTimestamp
import com.digitalasset.canton.tracing.TraceContext
import slick.jdbc.GetResult
@ -20,7 +20,7 @@ import scala.concurrent.Future
*/
final case class PackageDescription(
packageId: LfPackageId,
sourceDescription: String256M,
sourceDescription: String255,
uploadedAt: CantonTimestamp,
packageSize: Int,
)
@ -33,7 +33,7 @@ object PackageDescription {
GetResult
.createGetTuple4(
GetResult[LfPackageId],
GetResult[String256M],
GetResult[String255],
GetResult[CantonTimestamp],
GetResult[Int],
)

View File

@ -4,6 +4,7 @@
package com.digitalasset.canton.topology.admin.grpc
import cats.data.EitherT
import cats.implicits.catsSyntaxEitherId
import cats.syntax.parallel.*
import cats.syntax.traverse.*
import com.daml.nonempty.NonEmpty
@ -18,19 +19,56 @@ import com.digitalasset.canton.networking.grpc.CantonGrpcUtil.{wrapErr, wrapErrU
import com.digitalasset.canton.protocol.v30
import com.digitalasset.canton.serialization.ProtoConverter
import com.digitalasset.canton.serialization.ProtoConverter.ParsingResult
import com.digitalasset.canton.topology.admin.v30.*
import com.digitalasset.canton.topology.admin.v30.{
ExportTopologySnapshotRequest,
ExportTopologySnapshotResponse,
GenesisStateRequest,
GenesisStateResponse,
ListPartyHostingLimitsRequest,
ListPartyHostingLimitsResponse,
ListPurgeTopologyTransactionRequest,
ListPurgeTopologyTransactionResponse,
*,
}
import com.digitalasset.canton.topology.admin.v30 as adminProto
import com.digitalasset.canton.topology.client.DomainTopologyClient
import com.digitalasset.canton.topology.processing.{EffectiveTime, SequencedTime}
import com.digitalasset.canton.topology.store.StoredTopologyTransactions.GenericStoredTopologyTransactions
import com.digitalasset.canton.topology.store.TopologyStoreId.DomainStore
import com.digitalasset.canton.topology.store.{
StoredTopologyTransaction,
StoredTopologyTransactions,
TimeQuery,
TopologyStoreId,
}
import com.digitalasset.canton.topology.transaction.*
import com.digitalasset.canton.topology.{DomainId, UniqueIdentifier}
import com.digitalasset.canton.topology.transaction.{
AuthorityOf,
DecentralizedNamespaceDefinition,
DomainParametersState,
DomainTrustCertificate,
IdentifierDelegation,
MediatorDomainState,
NamespaceDelegation,
OwnerToKeyMapping,
ParticipantDomainPermission,
PartyHostingLimits,
PartyToParticipant,
PurgeTopologyTransaction,
SequencerDomainState,
SignedTopologyTransaction,
TopologyChangeOp,
TopologyMapping,
VettedPackages,
*,
}
import com.digitalasset.canton.topology.{
DomainId,
Member,
ParticipantId,
TopologyManagerError,
UniqueIdentifier,
store,
}
import com.digitalasset.canton.tracing.{TraceContext, TraceContextGrpc}
import com.digitalasset.canton.util.FutureInstances.*
import com.digitalasset.canton.util.{EitherTUtil, OptionUtil}
@ -138,6 +176,7 @@ object TopologyStore {
}
class GrpcTopologyManagerReadService(
member: Member,
stores: => Seq[topology.store.TopologyStore[TopologyStoreId]],
crypto: Crypto,
topologyClientLookup: TopologyStoreId => Option[DomainTopologyClient],
@ -166,6 +205,36 @@ class GrpcTopologyManagerReadService(
case None => EitherT.rightT(stores)
}
private def collectDomainStore(
filterStoreO: Option[TopologyStore]
)(implicit
traceContext: TraceContext
): EitherT[Future, CantonError, topology.store.TopologyStore[TopologyStoreId]] = {
val domainStores: Either[CantonError, store.TopologyStore[TopologyStoreId]] =
filterStoreO match {
case Some(filterStore) =>
val domainStores = stores.filter { s =>
s.storeId.isDomainStore && s.storeId.filterName.startsWith(filterStore.filterString)
}
domainStores match {
case Nil =>
TopologyManagerError.WrongDomain.InvalidFilterStore(filterStore.filterString).asLeft
case Seq(domainStore) => domainStore.asRight
case _ =>
TopologyManagerError.WrongDomain.MultipleDomainStores(filterStore.filterString).asLeft
}
case None =>
stores.find(_.storeId.isDomainStore) match {
case Some(domainStore) => domainStore.asRight
case None => TopologyManagerError.InternalError.Other("No domain store found").asLeft
}
}
EitherT.fromEither[Future](domainStores)
}
private def createBaseResult(context: TransactionSearchResult): adminProto.BaseResult = {
val storeProto: adminProto.Store = context.store match {
case DomainStore(domainId, _) =>
@ -738,6 +807,72 @@ class GrpcTopologyManagerReadService(
res
}
override def genesisState(request: GenesisStateRequest): Future[GenesisStateResponse] = {
implicit val traceContext: TraceContext = TraceContextGrpc.fromGrpcContext
val result: EitherT[Future, CantonError, GenesisStateResponse] = for {
_ <- member match {
case _: ParticipantId =>
wrapErr(
ProtoConverter
.required("filter_domain_store", request.filterDomainStore)
)
case _ => EitherT.rightT[Future, CantonError](())
}
topologyStoreO <- wrapErr(
request.filterDomainStore.traverse(TopologyStore.fromProto(_, "filter_domain_store"))
)
domainTopologyStore <- collectDomainStore(topologyStoreO)
timestampO <- wrapErr(
request.timestamp
.traverse(CantonTimestamp.fromProtoTimestamp)
)
sequencedTimestamp <- timestampO match {
case Some(value) => EitherT.rightT[Future, CantonError](value)
case None =>
val sequencedTimeF = domainTopologyStore
.maxTimestamp()
.collect {
case Some((sequencedTime, _)) =>
Right(sequencedTime.value)
case None =>
Left(
TopologyManagerError.InternalError.Other(s"No sequenced time found")
)
}
EitherT(sequencedTimeF)
}
// we exclude TrafficControlState from the genesis state because this mapping will be deleted.
topologySnapshot <- EitherT.right[CantonError](
domainTopologyStore.findEssentialStateAtSequencedTime(
SequencedTime(sequencedTimestamp),
excludeMappings = Seq(TopologyMapping.Code.TrafficControlState),
)
)
// reset effective time and sequenced time if we are initializing the sequencer from the beginning
genesisState: StoredTopologyTransactions[TopologyChangeOp, TopologyMapping] =
StoredTopologyTransactions[TopologyChangeOp, TopologyMapping](
topologySnapshot.result.map(stored =>
StoredTopologyTransaction(
SequencedTime(SignedTopologyTransaction.InitialTopologySequencingTime),
EffectiveTime(SignedTopologyTransaction.InitialTopologySequencingTime),
stored.validUntil.map(_ =>
EffectiveTime(SignedTopologyTransaction.InitialTopologySequencingTime)
),
stored.transaction,
)
)
)
} yield GenesisStateResponse(genesisState.toByteString(ProtocolVersion.latest))
CantonGrpcUtil.mapErrNew(result)
}
override def listPurgeTopologyTransaction(
request: ListPurgeTopologyTransactionRequest
): Future[ListPurgeTopologyTransactionResponse] = {

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: ai-analysis

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: bank

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: doctor

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: health-insurance

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: medical-records

View File

@ -34,10 +34,6 @@ service SequencerAdministrationService {
// the returned bytestring can be used directly to initialize the given sequencer later on
rpc OnboardingState(OnboardingStateRequest) returns (OnboardingStateResponse);
// Fetch the genesis state for a given sequencer.
// the returned bytestring can be used directly to initialize the given sequencer later on
rpc GenesisState(GenesisStateRequest) returns (GenesisStateResponse);
// Disable members at the sequencer. Will prevent existing and new instances from connecting, and permit removing their data.
rpc DisableMember(DisableMemberRequest) returns (DisableMemberResponse);
}
@ -108,24 +104,6 @@ message OnboardingStateResponse {
Failure failure = 2;
}
}
message GenesisStateRequest {
// Optional - the effective time used to fetch the topology transactions. If not provided the effective time of the last topology transaction is used.
google.protobuf.Timestamp timestamp = 1;
}
message GenesisStateResponse {
message Success {
// versioned stored topology transactions
bytes genesis_state_for_sequencer = 1;
}
message Failure {
string reason = 1;
}
oneof value {
Success success = 1;
Failure failure = 2;
}
}
message OnboardingStateForSequencer {
option (scalapb.message).companion_extends = "com.digitalasset.canton.version.UnstableProtoVersion";

View File

@ -28,17 +28,8 @@ import com.digitalasset.canton.serialization.ProtoConverter
import com.digitalasset.canton.time.DomainTimeTracker
import com.digitalasset.canton.topology.client.DomainTopologyClient
import com.digitalasset.canton.topology.processing.{EffectiveTime, SequencedTime}
import com.digitalasset.canton.topology.store.TopologyStore
import com.digitalasset.canton.topology.store.TopologyStoreId.DomainStore
import com.digitalasset.canton.topology.store.{
StoredTopologyTransaction,
StoredTopologyTransactions,
TopologyStore,
}
import com.digitalasset.canton.topology.transaction.{
SignedTopologyTransaction,
TopologyChangeOp,
TopologyMapping,
}
import com.digitalasset.canton.topology.{Member, SequencerId}
import com.digitalasset.canton.tracing.{TraceContext, TraceContextGrpc}
import com.digitalasset.canton.util.EitherTUtil
@ -199,69 +190,6 @@ class GrpcSequencerAdministrationService(
)
}
override def genesisState(request: v30.GenesisStateRequest): Future[v30.GenesisStateResponse] = {
implicit val traceContext: TraceContext = TraceContextGrpc.fromGrpcContext
val result = for {
timestampO <- EitherT
.fromEither[Future](
request.timestamp.traverse(CantonTimestamp.fromProtoTimestamp)
)
.leftMap(_.toString)
sequencedTimestamp <- timestampO match {
case Some(value) => EitherT.rightT[Future, String](value)
case None =>
val sequencedTimeF = topologyStore
.maxTimestamp()
.collect {
case Some((sequencedTime, _)) =>
Right(sequencedTime.value)
case None => Left("No sequenced time found")
}
EitherT(sequencedTimeF)
}
// we exclude TrafficControlState from the genesis state because this mapping will be deleted.
topologySnapshot <- EitherT.right[String](
topologyStore.findEssentialStateAtSequencedTime(
SequencedTime(sequencedTimestamp),
excludeMappings = Seq(TopologyMapping.Code.TrafficControlState),
)
)
// reset effective time and sequenced time if we are initializing the sequencer from the beginning
genesisState: StoredTopologyTransactions[TopologyChangeOp, TopologyMapping] =
StoredTopologyTransactions[TopologyChangeOp, TopologyMapping](
topologySnapshot.result.map(stored =>
StoredTopologyTransaction(
SequencedTime(SignedTopologyTransaction.InitialTopologySequencingTime),
EffectiveTime(SignedTopologyTransaction.InitialTopologySequencingTime),
stored.validUntil.map(_ =>
EffectiveTime(SignedTopologyTransaction.InitialTopologySequencingTime)
),
stored.transaction,
)
)
)
} yield genesisState.toByteString(staticDomainParameters.protocolVersion)
result
.fold[v30.GenesisStateResponse](
error =>
v30.GenesisStateResponse(
v30.GenesisStateResponse.Value.Failure(v30.GenesisStateResponse.Failure(error))
),
result =>
v30.GenesisStateResponse(
v30.GenesisStateResponse.Value.Success(
v30.GenesisStateResponse.Success(result)
)
),
)
}
override def disableMember(
requestP: v30.DisableMemberRequest
): Future[v30.DisableMemberResponse] = {

View File

@ -53,11 +53,10 @@ message CreatedEvent {
// The arguments that have been used to create the contract.
// Set either:
// - if there was a party, which is in the ``witness_parties`` of this event,
// and for which an ``InclusiveFilters`` exists with the ``template_id`` of this event
// and for which a ``CumulativeFilter`` exists with the ``template_id`` of this event
// among the ``template_filters``,
// - or if there was a party, which is in the ``witness_parties`` of this event,
// and for which a wildcard filter exists (``Filters`` without ``InclusiveFilters``,
// or with an ``InclusiveFilters`` with empty ``template_filters`` and empty ``interface_filters``).
// and for which a wildcard filter exists (``Filters`` with a ``CumulativeFilter`` of ``WildcardFilter``).
// Optional
Record create_arguments = 5;

View File

@ -11,31 +11,48 @@ option csharp_namespace = "Com.Daml.Ledger.Api.V2";
option java_outer_classname = "TransactionFilterOuterClass";
option java_package = "com.daml.ledger.api.v2";
// The union of a set of contract filters, or a wildcard.
// The union of a set of template filters, interface filters, or a wildcard.
message Filters {
// If set, then contracts matching any of the ``InclusiveFilters`` match
// this filter.
// If not set, or if ``InclusiveFilters`` has empty ``template_filters`` and empty ``interface_filters``:
// any contract matches this filter.
// Every filter in the cumulative list expands the scope of the resulting stream. Each interface,
// template or wildcard filter means additional events that will match the query.
// The impact of include_interface_view and include_created_event_blob fields in the filters will
// also be accumulated.
// At least one cumulative filter MUST be specified.
// A template or an interface SHOULD NOT appear twice in the accumulative field.
// A wildcard filter SHOULD NOT be defined more than once in the accumulative field.
// Optional
InclusiveFilters inclusive = 1;
repeated CumulativeFilter cumulative = 1;
}
// A filter that matches all contracts that are either an instance of one of
// the ``template_filters`` or that match one of the ``interface_filters``.
message InclusiveFilters {
// Include an ``InterfaceView`` for every ``InterfaceFilter`` matching a contract.
// The ``InterfaceFilter`` instances MUST each use a unique ``interface_id``.
// Optional
repeated InterfaceFilter interface_filters = 1;
message CumulativeFilter {
oneof identifier_filter {
// A wildcard filter that matches all templates
// Optional
WildcardFilter wildcard_filter = 1;
// A collection of templates for which the data will be included in the
// ``create_arguments`` of a matching ``CreatedEvent``.
// SHOULD NOT contain duplicate templates.
// If a contract is simultaneously selected by a template filter and one or more interface filters,
// the corresponding ``include_created_event_blob`` are consolidated using an OR operation.
// Include an ``InterfaceView`` for every ``InterfaceFilter`` matching a contract.
// The ``InterfaceFilter`` instances MUST each use a unique ``interface_id``.
// Optional
InterfaceFilter interface_filter = 2;
// A template for which the data will be included in the
// ``create_arguments`` of a matching ``CreatedEvent``.
// If a contract is simultaneously selected by a template filter and one or more interface filters,
// the corresponding ``include_created_event_blob`` are consolidated using an OR operation.
// Optional
TemplateFilter template_filter = 3;
}
}
// This filter matches all templates.
message WildcardFilter {
// Whether to include a ``created_event_blob`` in the returned ``CreatedEvent``.
// Use this to access the contract create event payload in your API client
// for submitting it as a disclosed contract with future commands.
// Optional
repeated TemplateFilter template_filters = 2;
bool include_created_event_blob = 1;
}
// This filter matches contracts that implement a specific interface.
@ -80,8 +97,8 @@ message TransactionFilter {
// The interpretation of the filter depends on the stream being filtered:
// (1) For **transaction tree streams** all party keys used as wildcard filters, and all subtrees
// whose root has one of the listed parties as an informee are returned.
// If there are InclusiveFilters, those will control returned ``CreatedEvent`` fields where applicable, but not
// used for template/interface filtering.
// If there are ``CumulativeFilter``s, those will control returned ``CreatedEvent`` fields where applicable, but will
// not be used for template/interface filtering.
// (2) For **transaction and active-contract-set streams** create and archive events are returned for all contracts whose
// stakeholders include at least one of the listed parties and match the
// per-party filter.
@ -91,7 +108,7 @@ message TransactionFilter {
// Wildcard filters that apply to all the parties existing on the participant. The interpretation of the filters is the same
// with the per-party filter as described above.
// (1) For **transaction tree streams** all subtrees are returned.
// If there are InclusiveFilters, those will control returned ``CreatedEvent`` fields where applicable, but not
// If there are ``CumulativeFilter``s, those will control returned ``CreatedEvent`` fields where applicable, but not
// used for template/interface filtering.
// (2) For **transaction and active-contract-set streams** create and archive events are returned for all contracts whose
// stakeholders include at least one of the participant's parties and match the filter.

View File

@ -22,15 +22,27 @@ import scala.collection.immutable
final case class TransactionFilter(
filtersByParty: immutable.Map[Ref.Party, Filters],
filtersForAnyParty: Option[Filters] = None,
alwaysPopulateCreatedEventBlob: Boolean = false,
)
final case class Filters(inclusive: Option[InclusiveFilters])
final case class Filters(
cumulative: Option[CumulativeFilter]
) //TODO(#19364) remove Option and use the wildcardFilter for party-wildcards
object Filters {
val noFilter: Filters = Filters(None)
def apply(inclusive: InclusiveFilters) = new Filters(Some(inclusive))
def templateWildcardFilter(includeCreatedEventBlob: Boolean = false): Filters = Filters(
Some(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set.empty,
templateWildcardFilter =
Some(TemplateWildcardFilter(includeCreatedEventBlob = includeCreatedEventBlob)),
)
)
)
def apply(cumulative: CumulativeFilter) = new Filters(Some(cumulative))
}
final case class InterfaceFilter(
@ -44,6 +56,10 @@ final case class TemplateFilter(
includeCreatedEventBlob: Boolean,
)
final case class TemplateWildcardFilter(
includeCreatedEventBlob: Boolean
)
object TemplateFilter {
def apply(templateId: Ref.Identifier, includeCreatedEventBlob: Boolean): TemplateFilter =
TemplateFilter(
@ -52,9 +68,10 @@ object TemplateFilter {
)
}
final case class InclusiveFilters(
final case class CumulativeFilter(
templateFilters: immutable.Set[TemplateFilter],
interfaceFilters: immutable.Set[InterfaceFilter],
templateWildcardFilter: Option[TemplateWildcardFilter],
)
sealed abstract class ParticipantOffset extends Product with Serializable
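A brief sketch of the domain-level helper added above, assuming a caller that already depends on the ledger-api domain package; the two values are equivalent.

import com.digitalasset.canton.ledger.api.domain

// Party-wildcard filter that also populates created_event_blob on matching events.
val allTemplatesWithBlob: domain.Filters =
  domain.Filters.templateWildcardFilter(includeCreatedEventBlob = true)

// The same filter spelled out with the CumulativeFilter case class.
val explicit: domain.Filters = domain.Filters(
  domain.CumulativeFilter(
    templateFilters = Set.empty,
    interfaceFilters = Set.empty,
    templateWildcardFilter = Some(domain.TemplateWildcardFilter(includeCreatedEventBlob = true)),
  )
)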

View File

@ -1,14 +0,0 @@
// Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.canton.ledger.api.messages.transaction
import com.daml.lf.data.Ref.Party
import com.digitalasset.canton.ledger.api.domain.ParticipantOffset
final case class GetUpdateTreesRequest(
startExclusive: ParticipantOffset,
endInclusive: Option[ParticipantOffset],
parties: Set[Party],
verbose: Boolean,
)

View File

@ -4,11 +4,13 @@
package com.digitalasset.canton.ledger.api.validation
import com.daml.error.ContextualizedErrorLogger
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.{
Filters,
InterfaceFilter,
TemplateFilter,
TransactionFilter,
WildcardFilter,
}
import com.digitalasset.canton.ledger.api.domain
import com.digitalasset.canton.ledger.api.validation.ValueValidator.*
@ -51,24 +53,39 @@ object TransactionFilterValidator {
// Allow using deprecated Protobuf fields for backwards compatibility
private def validateFilters(filters: Filters)(implicit
contextualizedErrorLogger: ContextualizedErrorLogger
): Either[StatusRuntimeException, domain.Filters] =
filters.inclusive
.fold[Either[StatusRuntimeException, domain.Filters]](Right(domain.Filters.noFilter)) {
inclusive =>
for {
validatedTemplates <-
inclusive.templateFilters.toList.traverse(validateTemplateFilter(_))
validatedInterfaces <-
inclusive.interfaceFilters.toList traverse validateInterfaceFilter
} yield domain.Filters(
Some(
domain.InclusiveFilters(
validatedTemplates.toSet,
validatedInterfaces.toSet,
)
)
): Either[StatusRuntimeException, domain.Filters] = {
val extractedFilters = filters.cumulative.map(_.identifierFilter)
val empties = extractedFilters.filter(_.isEmpty)
lazy val templateFilters = extractedFilters.collect({ case IdentifierFilter.TemplateFilter(f) =>
f
})
lazy val interfaceFilters = extractedFilters.collect({
case IdentifierFilter.InterfaceFilter(f) =>
f
})
lazy val wildcardFilters = extractedFilters.collect({ case IdentifierFilter.WildcardFilter(f) =>
f
})
if (empties.size == extractedFilters.size) Right(domain.Filters(None))
else {
for {
validatedTemplates <-
templateFilters.toList.traverse(validateTemplateFilter(_))
validatedInterfaces <-
interfaceFilters.toList.traverse(validateInterfaceFilter(_))
wildcardO = mergeWildcardFilters(wildcardFilters)
} yield domain.Filters(
Some(
domain.CumulativeFilter(
validatedTemplates.toSet,
validatedInterfaces.toSet,
wildcardO,
)
}
)
)
}
}
private def validateTemplateFilter(filter: TemplateFilter)(implicit
contextualizedErrorLogger: ContextualizedErrorLogger
@ -93,4 +110,16 @@ object TransactionFilterValidator {
includeCreatedEventBlob = filter.includeCreatedEventBlob,
)
}
private def mergeWildcardFilters(
filters: Seq[WildcardFilter]
): Option[domain.TemplateWildcardFilter] =
if (filters.isEmpty) None
else
Some(
domain.TemplateWildcardFilter(
includeCreatedEventBlob = filters.exists(_.includeCreatedEventBlob)
)
)
}

View File

@ -14,7 +14,6 @@ import com.daml.lf.data.Ref
import com.digitalasset.canton.ledger.api.domain
import com.digitalasset.canton.ledger.api.domain.ParticipantOffset
import com.digitalasset.canton.ledger.api.messages.transaction
import com.digitalasset.canton.ledger.api.messages.transaction.GetUpdateTreesRequest
import com.digitalasset.canton.ledger.api.validation.ValueValidator.*
import io.grpc.StatusRuntimeException
@ -83,36 +82,6 @@ class UpdateServiceRequestValidator(partyValidator: PartyValidator) {
}
}
def validateTree(
req: GetUpdatesRequest,
ledgerEnd: ParticipantOffset.Absolute,
)(implicit
contextualizedErrorLogger: ContextualizedErrorLogger
): Result[GetUpdateTreesRequest] = {
for {
partial <- commonValidations(req)
_ <- ParticipantOffsetValidator.offsetIsBeforeEndIfAbsolute(
"Begin",
partial.begin,
ledgerEnd,
)
_ <- ParticipantOffsetValidator.offsetIsBeforeEndIfAbsolute(
"End",
partial.end,
ledgerEnd,
)
convertedFilter <- transactionFilterToPartySet(partial.transactionFilter)
} yield {
transaction.GetUpdateTreesRequest(
partial.begin,
partial.end,
convertedFilter,
req.verbose,
)
}
}
def validateTransactionById(
req: GetTransactionByIdRequest
)(implicit
@ -153,10 +122,11 @@ class UpdateServiceRequestValidator(partyValidator: PartyValidator) {
transactionFilter: TransactionFilter
)(implicit contextualizedErrorLogger: ContextualizedErrorLogger) =
transactionFilter.filtersByParty
.collectFirst { case (party, Filters(Some(inclusive))) =>
invalidArgument(
s"$party attempted subscription for templates. Template filtration is not supported on GetTransactionTrees RPC. To get filtered data, use the GetTransactions RPC."
)
.collectFirst {
case (party, Filters(cumulative)) if cumulative.nonEmpty =>
invalidArgument(
s"$party attempted subscription for templates. Template filtration is not supported on GetTransactionTrees RPC. To get filtered data, use the GetTransactions RPC."
)
}
.fold(partyValidator.requireKnownParties(transactionFilter.filtersByParty.keys))(Left(_))

View File

@ -8,8 +8,8 @@ import com.daml.error.ContextualizedErrorLogger
import com.daml.lf.data.Ref.PackageId
import com.digitalasset.canton.data.Offset
import com.digitalasset.canton.ledger.api.health.ReportsHealth
import com.digitalasset.canton.ledger.participant.state.index.PackageDetails
import com.digitalasset.canton.platform.store.packagemeta.PackageMetadata
import com.digitalasset.canton.protocol.PackageDescription
import com.digitalasset.canton.topology.DomainId
import com.digitalasset.canton.topology.transaction.ParticipantPermission
import com.digitalasset.canton.tracing.{TraceContext, Traced}
@ -169,7 +169,7 @@ trait ReadService extends ReportsHealth with InternalStateServiceProvider {
def listLfPackages()(implicit
traceContext: TraceContext
): Future[Map[PackageId, PackageDetails]] =
): Future[Seq[PackageDescription]] =
throw new UnsupportedOperationException()
def getLfArchive(packageId: PackageId)(implicit

View File

@ -62,21 +62,4 @@ package index {
recordTime: Timestamp,
workflowId: WorkflowId,
)
/** Meta-data of a Daml-LF package
*
* @param size : The size of the archive payload, in bytes.
*
* @param knownSince : Indicates since when the package is known to
* the backing participant.
*
* @param sourceDescription : Optional description provided by the backing
* participant describing where it got the package from.
*/
// TODO(#17635): Consider using PackageDescription instead
final case class PackageDetails(
size: Long,
knownSince: Timestamp,
sourceDescription: Option[String],
)
}

View File

@ -10,7 +10,6 @@ import com.daml.metrics.Timed
import com.digitalasset.canton.LfPartyId
import com.digitalasset.canton.data.Offset
import com.digitalasset.canton.ledger.api.health.HealthStatus
import com.digitalasset.canton.ledger.participant.state.index.PackageDetails
import com.digitalasset.canton.ledger.participant.state.{
InternalStateService,
ReadService,
@ -19,6 +18,7 @@ import com.digitalasset.canton.ledger.participant.state.{
}
import com.digitalasset.canton.metrics.LedgerApiServerMetrics
import com.digitalasset.canton.platform.store.packagemeta.PackageMetadata
import com.digitalasset.canton.protocol.PackageDescription
import com.digitalasset.canton.tracing.{TraceContext, Traced}
import com.google.protobuf.ByteString
import org.apache.pekko.NotUsed
@ -69,7 +69,7 @@ final class TimedReadService(delegate: ReadService, metrics: LedgerApiServerMetr
override def listLfPackages()(implicit
traceContext: TraceContext
): Future[Map[PackageId, PackageDetails]] =
): Future[Seq[PackageDescription]] =
Timed.future(
metrics.services.read.listLfPackages,
delegate.listLfPackages(),

View File

@ -24,8 +24,8 @@ import com.digitalasset.canton.platform.config.{
TransactionTreeStreamsConfig,
UserManagementServiceConfig,
}
import com.digitalasset.canton.platform.indexer.IndexerConfig
import com.digitalasset.canton.platform.indexer.ha.HaConfig
import com.digitalasset.canton.platform.indexer.{IndexerConfig, PackageMetadataViewConfig}
import com.digitalasset.canton.platform.store.DbSupport.{
ConnectionPoolConfig,
DataSourceProperties,
@ -185,12 +185,6 @@ class PureConfigReaderWriter(secure: Boolean = true) {
implicit val participantIdWriter: ConfigWriter[Ref.ParticipantId] =
ConfigWriter.toString[Ref.ParticipantId](_.toString)
implicit val packageMetadataViewConfigHint: ProductHint[PackageMetadataViewConfig] =
ProductHint[PackageMetadataViewConfig](allowUnknownKeys = false)
implicit val packageMetadataViewConfigConvert: ConfigConvert[PackageMetadataViewConfig] =
deriveConvert[PackageMetadataViewConfig]
implicit val indexerConfigHint: ProductHint[IndexerConfig] =
ProductHint[IndexerConfig](allowUnknownKeys = false)

View File

@ -64,7 +64,7 @@ private[apiserver] final class ApiPackageService(
logger.info(s"Received request to list packages: $request")
readService
.listLfPackages()
.map(p => ListPackagesResponse(p.keys.toSeq))
.map(p => ListPackagesResponse(p.map(_.packageId.toString)))
.andThen(logger.logErrorsOnCall[ListPackagesResponse])
}
@ -100,16 +100,17 @@ private[apiserver] final class ApiPackageService(
) { implicit loggingContext =>
logger.info(s"Received request for a package status: $request")
withValidatedPackageId(request.packageId, request) { packageId =>
readService
.listLfPackages()
.map { packages =>
val result = if (packages.contains(packageId)) {
Future {
val result =
if (
readService.getPackageMetadataSnapshot.packageIdVersionMap.keySet.contains(packageId)
) {
PackageStatus.PACKAGE_STATUS_REGISTERED
} else {
PackageStatus.PACKAGE_STATUS_UNSPECIFIED
}
GetPackageStatusResponse(result)
}
GetPackageStatusResponse(result)
}
.andThen(logger.logErrorsOnCall[GetPackageStatusResponse])
}
}

View File

@ -70,9 +70,10 @@ final class ApiUpdateService(
validation.fold(
t => Source.failed(ValidationLogger.logFailureWithTrace(logger, request, t)),
req =>
if (req.filter.filtersByParty.isEmpty && req.filter.filtersForAnyParty.isEmpty)
if (req.filter.filtersByParty.isEmpty && req.filter.filtersForAnyParty.isEmpty) {
logger.debug("transaction filters were empty, will not return anything")
Source.empty
else {
} else {
LoggingContextWithTrace.withEnrichedLoggingContext(
logging.startExclusive(req.startExclusive),
logging.endInclusive(req.endInclusive),
@ -119,9 +120,10 @@ final class ApiUpdateService(
validation.fold(
t => Source.failed(ValidationLogger.logFailureWithTrace(logger, request, t)),
req =>
if (req.filter.filtersByParty.isEmpty && req.filter.filtersForAnyParty.isEmpty)
if (req.filter.filtersByParty.isEmpty && req.filter.filtersForAnyParty.isEmpty) {
logger.debug("transaction filters were empty, will not return anything")
Source.empty
else {
} else {
LoggingContextWithTrace.withEnrichedLoggingContext(
logging.startExclusive(req.startExclusive),
logging.endInclusive(req.endInclusive),

View File

@ -55,12 +55,12 @@ private[apiserver] final class ApiPackageManagementService private (
readService
.listLfPackages()
.map { pkgs =>
ListKnownPackagesResponse(pkgs.toSeq.map { case (pkgId, details) =>
ListKnownPackagesResponse(pkgs.map { pkgDescription =>
PackageDetails(
pkgId.toString,
details.size,
Some(TimestampConversion.fromLf(details.knownSince)),
details.sourceDescription.getOrElse(""),
pkgDescription.packageId.toString,
pkgDescription.packageSize.toLong,
Some(TimestampConversion.fromLf(pkgDescription.uploadedAt.underlying)),
pkgDescription.sourceDescription.toString,
)
})
}
@ -73,8 +73,7 @@ private[apiserver] final class ApiPackageManagementService private (
) { implicit loggingContext: LoggingContextWithTrace =>
logger.info(s"Validating DAR file, ${loggingContext.serializeFiltered("submissionId")}.")
readService
// TODO(#17635): Use proper name for darName
.validateDar(dar = request.darFile, darName = "validateDar")
.validateDar(dar = request.darFile, darName = "defaultDarName")
.flatMap {
case SubmissionResult.Acknowledged => Future.successful(ValidateDarFileResponse())
case err: SubmissionResult.SynchronousError => Future.failed(err.exception)

View File

@ -15,6 +15,7 @@ import com.digitalasset.canton.ledger.api.domain.{
EventId,
Filters,
ParticipantOffset,
TemplateWildcardFilter,
TransactionFilter,
TransactionId,
}
@ -85,10 +86,30 @@ package object logging {
)
private def filtersToLoggingValue(filters: Filters): LoggingValue =
filters.inclusive match {
filters.cumulative match {
case None => LoggingValue.from("all-templates")
case Some(inclusiveFilters) =>
LoggingValue.from(inclusiveFilters.templateFilters.map(_.templateTypeRef))
case Some(cumulativeFilter) =>
LoggingValue.Nested(
LoggingEntries.fromMap(
Map(
"templates" -> LoggingValue.from(
cumulativeFilter.templateFilters.map(_.templateTypeRef)
),
"interfaces" -> LoggingValue.from(
cumulativeFilter.interfaceFilters.map(_.interfaceId)
),
)
++ (cumulativeFilter.templateWildcardFilter match {
case Some(TemplateWildcardFilter(includeCreatedEventBlob)) =>
Map(
"all-templates, created_event_blob" -> LoggingValue.from(
includeCreatedEventBlob
)
)
case None => Map.empty
})
)
)
}
private[services] def submissionId(id: String): LoggingEntry =

View File

@ -23,8 +23,8 @@ import com.daml.tracing.{Event, SpanAttribute, Spans}
import com.digitalasset.canton.concurrent.DirectExecutionContext
import com.digitalasset.canton.data.Offset
import com.digitalasset.canton.ledger.api.domain.{
CumulativeFilter,
Filters,
InclusiveFilters,
ParticipantOffset,
TransactionFilter,
TransactionId,
@ -511,15 +511,16 @@ object IndexServiceImpl {
if (!metadata.templates.contains(templateId)) unknownTemplateIds += templateId
}
val inclusiveFilters = domainTransactionFilter.filtersByParty.iterator.flatMap(
_._2.inclusive.iterator
) ++ domainTransactionFilter.filtersForAnyParty.flatMap(_.inclusive).iterator
val cumulativeFilters = domainTransactionFilter.filtersByParty.iterator.flatMap(
_._2.cumulative.iterator
) ++ domainTransactionFilter.filtersForAnyParty.flatMap(_.cumulative).iterator
inclusiveFilters.foreach { case InclusiveFilters(templateFilters, interfaceFilters) =>
templateFilters.iterator.map(_.templateTypeRef).foreach(checkTypeConRef)
interfaceFilters.iterator.map(_.interfaceId).foreach { interfaceId =>
if (!metadata.interfaces.contains(interfaceId)) unknownInterfaceIds += interfaceId
}
cumulativeFilters.foreach {
case CumulativeFilter(templateFilters, interfaceFilters, _wildcardFilter) =>
templateFilters.iterator.map(_.templateTypeRef).foreach(checkTypeConRef)
interfaceFilters.iterator.map(_.interfaceId).foreach { interfaceId =>
if (!metadata.interfaces.contains(interfaceId)) unknownInterfaceIds += interfaceId
}
}
val packageNames = unknownPackageNames.result()
@ -678,14 +679,14 @@ object IndexServiceImpl {
private def templateIds(
metadata: PackageMetadata,
inclusiveFilters: InclusiveFilters,
cumulativeFilter: CumulativeFilter,
): Set[Identifier] = {
val fromInterfacesDefs = inclusiveFilters.interfaceFilters.view
val fromInterfacesDefs = cumulativeFilter.interfaceFilters.view
.map(_.interfaceId)
.flatMap(metadata.interfacesImplementedBy.getOrElse(_, Set.empty))
.toSet
val fromTemplateDefs = inclusiveFilters.templateFilters.view
val fromTemplateDefs = cumulativeFilter.templateFilters.view
.map(_.templateTypeRef)
.flatMap {
case TypeConRef(PackageRef.Name(packageName), qualifiedName) =>
@ -702,8 +703,8 @@ object IndexServiceImpl {
): Map[Identifier, Option[Set[Party]]] = {
val templatesFilterByParty =
transactionFilter.filtersByParty.view.foldLeft(Map.empty[Identifier, Option[Set[Party]]]) {
case (acc, (party, Filters(Some(inclusiveFilters)))) =>
templateIds(metadata, inclusiveFilters).foldLeft(acc) { case (acc, templateId) =>
case (acc, (party, Filters(Some(cumulativeFilter)))) =>
templateIds(metadata, cumulativeFilter).foldLeft(acc) { case (acc, templateId) =>
val updatedPartySet = acc.getOrElse(templateId, Some(Set.empty[Party])).map(_ + party)
acc.updated(templateId, updatedPartySet)
}
@ -714,8 +715,8 @@ object IndexServiceImpl {
val templatesFilterForAnyParty: Map[Identifier, Option[Set[Party]]] =
transactionFilter.filtersForAnyParty
.fold(Set.empty[Identifier]) {
case Filters(Some(inclusiveFilters)) =>
templateIds(metadata, inclusiveFilters)
case Filters(Some(cumulativeFilter)) =>
templateIds(metadata, cumulativeFilter)
case _ => Set.empty
}
.map((_, None))
@ -727,20 +728,34 @@ object IndexServiceImpl {
}
// parties with a template-wildcard subscription in the given filter; None denotes a party-wildcard
private[index] def wildcardFilter(
transactionFilter: domain.TransactionFilter
): Option[Set[Party]] = {
transactionFilter.filtersForAnyParty match {
case Some(Filters(None)) => None
case Some(Filters(Some(InclusiveFilters(templateIds, interfaceFilters))))
if templateIds.isEmpty && interfaceFilters.isEmpty =>
None
case Some(Filters(None)) => None // party-wildcard
case Some(Filters(Some(CumulativeFilter(_, _, templateWildcardFilter))))
if templateWildcardFilter.isDefined =>
None // party-wildcard
case Some(
Filters(Some(CumulativeFilter(templateIds, interfaceFilters, templateWildcardFilter)))
) if templateIds.isEmpty && interfaceFilters.isEmpty && templateWildcardFilter.isEmpty =>
None // TODO(#19364) do not allow this situation; throw an exception instead
case _ =>
Some(transactionFilter.filtersByParty.view.collect {
case (party, Filters(None)) =>
party
case (party, Filters(Some(InclusiveFilters(templateIds, interfaceFilters))))
if templateIds.isEmpty && interfaceFilters.isEmpty =>
case (party, Filters(Some(CumulativeFilter(_, _, templateWildcardFilter))))
if templateWildcardFilter.isDefined =>
party
case (
party,
Filters(
Some(CumulativeFilter(templateIds, interfaceFilters, templateWildcardFilter))
),
)
if templateIds.isEmpty && interfaceFilters.isEmpty && templateWildcardFilter.isEmpty =>
// TODO(#19364) do not allow this situation; throw an exception instead
party
}.toSet)
}
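For orientation, a minimal sketch (illustrative only, not part of the change) of the two filter shapes that the wildcardFilter above now treats as a template-wildcard subscription for a party; the constructors follow the domain model used elsewhere in this diff, and the party value is a placeholder.
// Hypothetical illustration: per the accompanying tests, both shapes yield Some(Set(party)).
import com.daml.lf.data.Ref.Party
import com.digitalasset.canton.ledger.api.domain.{
  CumulativeFilter,
  Filters,
  TemplateWildcardFilter,
  TransactionFilter,
}
val party: Party = Party.assertFromString("party")
// 1. No filter payload at all for the party.
val byAbsentFilters = TransactionFilter(filtersByParty = Map(party -> Filters(None)))
// 2. An explicit template-wildcard filter for the party.
val byExplicitWildcard = TransactionFilter(filtersByParty =
  Map(
    party -> Filters(
      Some(
        CumulativeFilter(
          templateFilters = Set.empty,
          interfaceFilters = Set.empty,
          templateWildcardFilter = Some(TemplateWildcardFilter(includeCreatedEventBlob = false)),
        )
      )
    )
  )
)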

View File

@ -20,8 +20,6 @@ final case class IndexerConfig(
inputMappingParallelism: NonNegativeInt =
NonNegativeInt.tryCreate(DefaultInputMappingParallelism),
maxInputBufferSize: NonNegativeInt = NonNegativeInt.tryCreate(DefaultMaxInputBufferSize),
// TODO(#17635): Move once only Admin API package service depends on it
packageMetadataView: PackageMetadataViewConfig = DefaultPackageMetadataViewConfig,
restartDelay: NonNegativeFiniteDuration =
NonNegativeFiniteDuration.ofSeconds(DefaultRestartDelay.toSeconds),
submissionBatchSize: Long = DefaultSubmissionBatchSize,
@ -64,6 +62,4 @@ object IndexerConfig {
val DefaultEnableCompression: Boolean = false
val DefaultMaxOutputBatchedBufferSize: Int = 16
val DefaultMaxTailerBatchSize: Int = 10
val DefaultPackageMetadataViewConfig: PackageMetadataViewConfig =
PackageMetadataViewConfig.Default
}

View File

@ -5,7 +5,7 @@ package com.digitalasset.canton.platform.store.dao
import com.daml.lf.data.Ref.*
import com.digitalasset.canton.ledger.api.domain
import com.digitalasset.canton.ledger.api.domain.{Filters, InclusiveFilters}
import com.digitalasset.canton.ledger.api.domain.{CumulativeFilter, Filters, TemplateWildcardFilter}
import com.digitalasset.canton.platform.store.dao.EventProjectionProperties.Projection
import scala.collection.View
@ -19,13 +19,19 @@ import scala.collection.View
* populated for all the templates, if None then contract arguments
* for all the parties and for all the templates will be populated
* @param witnessTemplateProjections per witness party, per template projections
* @param templateWildcardCreatedEventBlobParties parties for which the created event blob will be
* populated for all the templates, if None then
* blobs for all the parties and all the templates
* will be populated
*/
final case class EventProjectionProperties(
verbose: Boolean,
templateWildcardWitnesses: Option[Set[String]],
// Map((witness or wildcard) -> Map(template -> projection)), where a None key denotes a party wildcard
witnessTemplateProjections: Map[Option[String], Map[Identifier, Projection]] = Map.empty,
alwaysPopulateCreatedEventBlob: Boolean = false,
templateWildcardCreatedEventBlobParties: Option[Set[String]] = Some(
Set.empty
), // TODO(#19364) fuse with templateWildcardWitnesses into a templateWildcard Projection and potentially include it in the following Map
) {
def render(witnesses: Set[String], templateId: Identifier): Projection = {
(witnesses.iterator.map(Some(_))
@ -37,7 +43,8 @@ final case class EventProjectionProperties(
contractArguments = templateWildcardWitnesses.fold(witnesses.nonEmpty)(parties =>
witnesses.exists(parties)
),
createdEventBlob = alwaysPopulateCreatedEventBlob,
createdEventBlob = templateWildcardCreatedEventBlobParties
.fold(witnesses.nonEmpty)(parties => witnesses.exists(parties)),
)
)(_ append _)
}
@ -78,12 +85,13 @@ object EventProjectionProperties {
verbose = verbose,
templateWildcardWitnesses =
templateWildcardWitnesses(transactionFilter, alwaysPopulateArguments),
templateWildcardCreatedEventBlobParties =
templateWildcardCreatedEventBlobParties(transactionFilter),
witnessTemplateProjections = witnessTemplateProjections(
transactionFilter,
interfaceImplementedBy,
resolveTemplateIds,
),
alwaysPopulateCreatedEventBlob = transactionFilter.alwaysPopulateCreatedEventBlob,
)
private def templateWildcardWitnesses(
@ -103,25 +111,54 @@ object EventProjectionProperties {
}
} else
domainTransactionFilter.filtersForAnyParty match {
case Some(Filters(None)) => None
case Some(Filters(Some(inclusive)))
if inclusive.templateFilters.isEmpty && inclusive.interfaceFilters.isEmpty =>
case Some(Filters(None)) =>
None
case Some(Filters(Some(cumulative))) if cumulative.templateWildcardFilter.isDefined =>
None
case Some(Filters(Some(cumulative)))
if cumulative.templateFilters.isEmpty && cumulative.interfaceFilters.isEmpty && cumulative.templateWildcardFilter.isEmpty =>
None
// the party-wildcard filter is either absent or restricted to specific templates, so collect the template-wildcard witnesses from the per-party filters
case _ =>
Some(
domainTransactionFilter.filtersByParty.iterator
.collect {
case (party, Filters(None)) => party
case (party, Filters(Some(empty)))
if empty.templateFilters.isEmpty && empty.interfaceFilters.isEmpty =>
case (party, Filters(None)) =>
party
case (party, Filters(Some(cumulative)))
if cumulative.templateWildcardFilter.isDefined =>
party
case (party, Filters(Some(empty)))
if empty.templateFilters.isEmpty && empty.interfaceFilters.isEmpty && empty.templateWildcardFilter.isEmpty =>
party // TODO(#19364) this should not happen
}
.map(_.toString)
.toSet
)
}
private def templateWildcardCreatedEventBlobParties(
domainTransactionFilter: domain.TransactionFilter
): Option[Set[String]] =
domainTransactionFilter.filtersForAnyParty match {
case Some(Filters(Some(CumulativeFilter(_, _, Some(TemplateWildcardFilter(true)))))) =>
None // include blobs for all templates and all parties
// the party-wildcard filter is either absent or does not request blobs for all templates, so collect the template-wildcard blob parties from the per-party filters
case _ =>
Some(
domainTransactionFilter.filtersByParty.iterator
.collect {
case (
party,
Filters(Some(CumulativeFilter(_, _, Some(TemplateWildcardFilter(true))))),
) =>
party
}
.map(_.toString)
.toSet
)
}
private def witnessTemplateProjections(
domainTransactionFilter: domain.TransactionFilter,
interfaceImplementedBy: Identifier => Set[Identifier],
@ -134,10 +171,10 @@ object EventProjectionProperties {
domainTransactionFilter.filtersForAnyParty.toList.view.map((None, _))
(for {
(partyO, filters) <- partyFilterPairs
inclusiveFilters <- filters.inclusive.toList.view
cumulativeFilter <- filters.cumulative.toList.view
} yield {
val interfaceFilterProjections = for {
interfaceFilter <- inclusiveFilters.interfaceFilters.view
interfaceFilter <- cumulativeFilter.interfaceFilters.view
implementor <- interfaceImplementedBy(interfaceFilter.interfaceId).view
} yield implementor -> Projection(
interfaces =
@ -145,7 +182,7 @@ object EventProjectionProperties {
createdEventBlob = interfaceFilter.includeCreatedEventBlob,
contractArguments = false,
)
val templateProjections = getTemplateProjections(inclusiveFilters, resolveTemplateIds)
val templateProjections = getTemplateProjections(cumulativeFilter, resolveTemplateIds)
val projectionsForParty =
(interfaceFilterProjections ++ templateProjections)
.groupMap(_._1)(_._2)
@ -158,11 +195,11 @@ object EventProjectionProperties {
}
private def getTemplateProjections(
inclusiveFilters: InclusiveFilters,
cumulativeFilter: CumulativeFilter,
resolveTemplateIds: TypeConRef => Set[Identifier],
): View[(Identifier, Projection)] =
for {
templateFilter <- inclusiveFilters.templateFilters.view
templateFilter <- cumulativeFilter.templateFilters.view
templateId <- resolveTemplateIds(templateFilter.templateTypeRef).view
} yield templateId -> Projection(
interfaces = Set.empty,

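As a rough usage sketch (illustrative only; the template id and party names below are placeholders, and the case class is constructed directly with its defaults), the new templateWildcardCreatedEventBlobParties field gates created_event_blob projection the same way templateWildcardWitnesses gates contract arguments:
// Hypothetical illustration of the rendering rule introduced above.
import com.daml.lf.data.Ref.Identifier
import com.digitalasset.canton.platform.store.dao.EventProjectionProperties
val tid: Identifier = Identifier.assertFromString("pkgId:Mod:Tmpl")
val props = EventProjectionProperties(
  verbose = true,
  templateWildcardWitnesses = Some(Set("alice")),
  templateWildcardCreatedEventBlobParties = Some(Set("alice")),
)
// "alice" is in both wildcard sets, so contract arguments and the blob are projected:
//   props.render(Set("alice"), tid) == Projection(Set.empty, createdEventBlob = true, contractArguments = true)
// "bob" is in neither set, so nothing beyond the empty interface set is projected:
//   props.render(Set("bob"), tid)   == Projection(Set.empty, createdEventBlob = false, contractArguments = false)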
View File

@ -7,9 +7,10 @@ import com.daml.error.{ContextualizedErrorLogger, NoLogging}
import com.daml.ledger.api.v2.participant_offset.ParticipantOffset
import com.daml.ledger.api.v2.participant_offset.ParticipantOffset.ParticipantBoundary
import com.daml.ledger.api.v2.state_service.GetLedgerEndRequest
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.{
CumulativeFilter,
Filters,
InclusiveFilters,
InterfaceFilter,
TemplateFilter,
*,
@ -47,24 +48,28 @@ class UpdateServiceRequestValidatorTest
Map(
party ->
Filters(
Some(
InclusiveFilters(
templateFilters = templateIdsForParty.map(tId => TemplateFilter(Some(tId))),
interfaceFilters = Seq(
InterfaceFilter(
interfaceId = Some(
Identifier(
packageId,
moduleName = includedModule,
entityName = includedTemplate,
)
),
includeInterfaceView = true,
includeCreatedEventBlob = true,
)
),
templateIdsForParty
.map(tId =>
CumulativeFilter(IdentifierFilter.TemplateFilter(TemplateFilter(Some(tId))))
)
)
++
Seq(
CumulativeFilter(
IdentifierFilter.InterfaceFilter(
InterfaceFilter(
interfaceId = Some(
Identifier(
packageId,
moduleName = includedModule,
entityName = includedTemplate,
)
),
includeInterfaceView = true,
includeCreatedEventBlob = true,
)
)
)
)
)
)
)
@ -142,7 +147,11 @@ class UpdateServiceRequestValidatorTest
requestMustFailWith(
request = validator.validate(
txReq.update(_.filter.filtersByParty.modify(_.map { case (p, f) =>
p -> f.update(_.inclusive := InclusiveFilters(Seq(InterfaceFilter(None, true))))
p -> f.update(
_.cumulative := Seq(
CumulativeFilter(IdentifierFilter.InterfaceFilter(InterfaceFilter(None, true)))
)
)
})),
ledgerEnd,
),
@ -268,7 +277,7 @@ class UpdateServiceRequestValidatorTest
inside(
validator.validate(
txReq.update(_.filter.filtersByParty.modify(_.map { case (p, f) =>
p -> f.update(_.inclusive := InclusiveFilters(Nil, Nil))
p -> f.update(_.cumulative := Seq(CumulativeFilter.defaultInstance))
})),
ledgerEnd,
)
@ -279,7 +288,7 @@ class UpdateServiceRequestValidatorTest
filtersByParty should have size 1
inside(filtersByParty.headOption.value) { case (p, filters) =>
p shouldEqual party
filters shouldEqual domain.Filters(Some(domain.InclusiveFilters(Set(), Set())))
filters shouldEqual domain.Filters(None)
}
req.verbose shouldEqual verbose
}
@ -289,7 +298,7 @@ class UpdateServiceRequestValidatorTest
inside(
validator.validate(
txReq.update(_.filter.filtersByParty.modify(_.map { case (p, f) =>
p -> f.update(_.optionalInclusive := None)
p -> f.update(_.cumulative := Seq())
})),
ledgerEnd,
)
@ -344,18 +353,23 @@ class UpdateServiceRequestValidatorTest
TransactionFilter(
Map(
party -> Filters(
Some(
InclusiveFilters(
interfaceFilters = Seq(
Seq(
CumulativeFilter(
IdentifierFilter.InterfaceFilter(
InterfaceFilter(
interfaceId = Some(templateId),
includeInterfaceView = true,
includeCreatedEventBlob = true,
)
),
templateFilters = Seq(TemplateFilter(Some(templateId), true)),
)
)
)
++
Seq(
CumulativeFilter(
IdentifierFilter.TemplateFilter(TemplateFilter(Some(templateId), true))
)
)
)
)
)
@ -367,7 +381,7 @@ class UpdateServiceRequestValidatorTest
Map(
party -> domain.Filters(
Some(
domain.InclusiveFilters(
domain.CumulativeFilter(
templateFilters = Set(
domain.TemplateFilter(
TypeConRef.assertFromString("packageId:includedModule:includedTemplate"),
@ -383,6 +397,7 @@ class UpdateServiceRequestValidatorTest
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
)
@ -391,68 +406,6 @@ class UpdateServiceRequestValidatorTest
}
}
"validating tree requests" should {
"tolerate missing filters_inclusive" in {
inside(validator.validateTree(txTreeReq, ledgerEnd)) { case Right(req) =>
req.startExclusive shouldEqual domain.ParticipantOffset.ParticipantBegin
req.endInclusive shouldEqual Some(domain.ParticipantOffset.Absolute(absoluteOffset))
req.parties should have size 1
req.parties.headOption.value shouldEqual party
req.verbose shouldEqual verbose
}
}
"not tolerate having filters_inclusive" in {
requestMustFailWith(
request = validator.validateTree(
txTreeReq.update(_.filter.filtersByParty.modify(_.map { case (p, f) =>
p -> f.update(_.optionalInclusive := Some(InclusiveFilters()))
})),
ledgerEnd,
),
code = INVALID_ARGUMENT,
description =
"INVALID_ARGUMENT(8,0): The submitted request has invalid arguments: party attempted subscription for templates. Template filtration is not supported on GetTransactionTrees RPC. To get filtered data, use the GetTransactions RPC.",
metadata = Map.empty,
)
}
"return the correct error when begin offset is after ledger end" in {
requestMustFailWith(
request = validator.validateTree(
txTreeReq.withBeginExclusive(
ParticipantOffset(
ParticipantOffset.Value.Absolute((ledgerEnd.value.toInt + 1).toString)
)
),
ledgerEnd,
),
code = OUT_OF_RANGE,
description =
"OFFSET_AFTER_LEDGER_END(12,0): Begin offset (1001) is after ledger end (1000)",
metadata = Map.empty,
)
}
"return the correct error when end offset is after ledger end" in {
requestMustFailWith(
request = validator.validateTree(
txTreeReq.withEndInclusive(
ParticipantOffset(
ParticipantOffset.Value.Absolute((ledgerEnd.value.toInt + 1).toString)
)
),
ledgerEnd,
),
code = OUT_OF_RANGE,
description =
"OFFSET_AFTER_LEDGER_END(12,0): End offset (1001) is after ledger end (1000)",
metadata = Map.empty,
)
}
}
"validating transaction by id requests" should {
"fail on empty transactionId" in {
@ -524,17 +477,6 @@ class UpdateServiceRequestValidatorTest
)
}
"reject transaction tree requests for unknown parties" in {
requestMustFailWith(
request = partyRestrictiveValidator
.validateTree(txTreeReq.withFilter(filterWithUnknown), ledgerEnd),
code = INVALID_ARGUMENT,
description =
"INVALID_ARGUMENT(8,0): The submitted request has invalid arguments: Unknown parties: [Alice, Bob]",
metadata = Map.empty,
)
}
"reject transaction by id requests for unknown parties" in {
requestMustFailWith(
request = partyRestrictiveValidator.validateTransactionById(
@ -566,13 +508,6 @@ class UpdateServiceRequestValidatorTest
) shouldBe a[Right[_, _]]
}
"accept transaction tree requests for known parties" in {
partyRestrictiveValidator.validateTree(
txTreeReq.withFilter(filterWithKnown),
ledgerEnd,
) shouldBe a[Right[_, _]]
}
"accept transaction by id requests for known parties" in {
partyRestrictiveValidator.validateTransactionById(
txByIdReq.withRequestingParties(List("party"))

View File

@ -61,7 +61,7 @@ trait ValidatorTestUtils extends Matchers with Inside with OptionValues {
p shouldEqual party
filters shouldEqual domain.Filters(
Some(
domain.InclusiveFilters(
domain.CumulativeFilter(
templateFilters =
expectedTemplates.map(TemplateFilter(_, includeCreatedEventBlob = false)),
interfaceFilters = Set(
@ -77,6 +77,7 @@ trait ValidatorTestUtils extends Matchers with Inside with OptionValues {
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
)

View File

@ -13,7 +13,7 @@ import com.digitalasset.canton.config.NonNegativeFiniteDuration
import com.digitalasset.canton.config.RequireTypes.{NonNegativeInt, Port}
import com.digitalasset.canton.platform.apiserver.configuration.RateLimitingConfig
import com.digitalasset.canton.platform.config.{IdentityProviderManagementConfig, *}
import com.digitalasset.canton.platform.indexer.{IndexerConfig, PackageMetadataViewConfig}
import com.digitalasset.canton.platform.indexer.IndexerConfig
import com.digitalasset.canton.platform.store.DbSupport
import com.digitalasset.canton.platform.store.DbSupport.DataSourceProperties
import com.digitalasset.canton.platform.store.backend.postgresql.PostgresDataSourceConfig
@ -179,11 +179,6 @@ object ArbitraryConfig {
optElement <- Gen.option(element)
} yield optElement
val packageMetadataViewConfig = for {
initLoadParallelism <- Gen.chooseNum(0, Int.MaxValue)
initProcessParallelism <- Gen.chooseNum(0, Int.MaxValue)
} yield PackageMetadataViewConfig(initLoadParallelism, initProcessParallelism)
val indexerConfig = for {
batchingParallelism <- nonNegativeIntGen
enableCompression <- Gen.oneOf(true, false)
@ -192,7 +187,6 @@ object ArbitraryConfig {
maxInputBufferSize <- nonNegativeIntGen
restartDelay <- nonNegativeFiniteDurationGen
submissionBatchSize <- Gen.long
packageMetadataViewConfig <- packageMetadataViewConfig
} yield IndexerConfig(
batchingParallelism = batchingParallelism,
enableCompression = enableCompression,
@ -201,7 +195,6 @@ object ArbitraryConfig {
maxInputBufferSize = maxInputBufferSize,
restartDelay = restartDelay,
submissionBatchSize = submissionBatchSize,
packageMetadataView = packageMetadataViewConfig,
)
def genActiveContractsServiceStreamConfig: Gen[ActiveContractsServiceStreamsConfig] =

View File

@ -16,8 +16,8 @@ import com.digitalasset.canton.platform.config.{
IndexServiceConfig,
UserManagementServiceConfig,
}
import com.digitalasset.canton.platform.indexer.IndexerConfig
import com.digitalasset.canton.platform.indexer.ha.HaConfig
import com.digitalasset.canton.platform.indexer.{IndexerConfig, PackageMetadataViewConfig}
import com.digitalasset.canton.platform.store.DbSupport.ParticipantDataSourceConfig
import com.digitalasset.canton.platform.store.backend.postgresql.PostgresDataSourceConfig
import com.digitalasset.canton.platform.store.backend.postgresql.PostgresDataSourceConfig.SynchronousCommitValue
@ -86,7 +86,6 @@ class PureConfigReaderWriterSpec
Some("RateLimitingConfig"),
)
testReaderWriterIsomorphism(secure, ArbitraryConfig.indexerConfig)
testReaderWriterIsomorphism(secure, ArbitraryConfig.packageMetadataViewConfig)
testReaderWriterIsomorphism(secure, ArbitraryConfig.commandServiceConfig)
testReaderWriterIsomorphism(secure, ArbitraryConfig.indexServiceConfig)
}
@ -385,28 +384,6 @@ class PureConfigReaderWriterSpec
convert(haConfigConvert, value).left.value.prettyPrint(0) should include("Unknown key")
}
behavior of "PackageMetadataViewConfig"
val validPackageMetadataViewConfigValue =
"""
| init-load-parallelism = 16
| init-process-parallelism = 16
| init-takes-too-long-initial-delay = 1 minute
| init-takes-too-long-interval = 10 seconds
| """.stripMargin
it should "support current defaults" in {
val value = validPackageMetadataViewConfigValue
convert(packageMetadataViewConfigConvert, value).value shouldBe PackageMetadataViewConfig()
}
it should "not support unknown keys" in {
val value = "unknown-key=yes\n" + validPackageMetadataViewConfigValue
convert(packageMetadataViewConfigConvert, value).left.value.prettyPrint(0) should include(
"Unknown key"
)
}
behavior of "IndexerConfig"
val validIndexerConfigValue =

View File

@ -8,10 +8,11 @@ import com.daml.lf.data.Ref
import com.daml.lf.data.Ref.{Identifier, Party, QualifiedName, TypeConRef}
import com.daml.nonempty.NonEmpty
import com.digitalasset.canton.ledger.api.domain.{
CumulativeFilter,
Filters,
InclusiveFilters,
InterfaceFilter,
TemplateFilter,
TemplateWildcardFilter,
TransactionFilter,
}
import com.digitalasset.canton.ledger.error.groups.RequestValidationErrors
@ -57,7 +58,15 @@ class IndexServiceImplSpec
val memoFunc = memoizedTransactionFilterProjection(
getPackageMetadataSnapshot = getPackageMetadata,
transactionFilter = TransactionFilter(
filtersByParty = Map(party -> Filters(InclusiveFilters(Set(), Set(iface1Filter))))
filtersByParty = Map(
party -> Filters(
CumulativeFilter(
templateFilters = Set(),
interfaceFilters = Set(iface1Filter),
templateWildcardFilter = None,
)
)
)
),
verbose = true,
alwaysPopulateArguments = false,
@ -71,9 +80,11 @@ class IndexServiceImplSpec
(
TemplatePartiesFilter(Map(template1 -> Some(Set(party))), Some(Set())),
EventProjectionProperties(
true,
Some(Set.empty),
Map(Some(party.toString) -> Map(template1 -> Projection(Set(iface1), false, false))),
verbose = true,
templateWildcardWitnesses = Some(Set.empty),
templateWildcardCreatedEventBlobParties = Some(Set.empty),
witnessTemplateProjections =
Map(Some(party.toString) -> Map(template1 -> Projection(Set(iface1), false, false))),
),
)
) // filter gets complicated, filters template1 for iface1, projects iface1
@ -92,14 +103,15 @@ class IndexServiceImplSpec
templateWildcardParties = Some(Set()),
),
EventProjectionProperties(
true,
Some(Set.empty),
Map(
verbose = true,
templateWildcardWitnesses = Some(Set.empty),
witnessTemplateProjections = Map(
Some(party.toString) -> Map(
template1 -> Projection(Set(iface1), false, false),
template2 -> Projection(Set(iface1), false, false),
)
),
templateWildcardCreatedEventBlobParties = Some(Set.empty),
),
)
) // filter gets even more complicated, filters template1 and template2 for iface1, projects iface1 for both templates
@ -112,7 +124,15 @@ class IndexServiceImplSpec
val memoFunc = memoizedTransactionFilterProjection(
getPackageMetadataSnapshot = getPackageMetadata,
transactionFilter = TransactionFilter(
filtersByParty = Map(party -> Filters(InclusiveFilters(Set(), Set(iface1Filter))))
filtersByParty = Map(
party -> Filters(
CumulativeFilter(
templateFilters = Set(),
interfaceFilters = Set(iface1Filter),
templateWildcardFilter = None,
)
)
)
),
verbose = true,
alwaysPopulateArguments = true,
@ -121,9 +141,11 @@ class IndexServiceImplSpec
(
TemplatePartiesFilter(Map(template1 -> Some(Set(party))), Some(Set())),
EventProjectionProperties(
true,
Some(Set(party)),
Map(Some(party.toString) -> Map(template1 -> Projection(Set(iface1), false, false))),
verbose = true,
templateWildcardWitnesses = Some(Set(party)),
witnessTemplateProjections =
Map(Some(party.toString) -> Map(template1 -> Projection(Set(iface1), false, false))),
templateWildcardCreatedEventBlobParties = Some(Set.empty),
),
)
)
@ -142,7 +164,13 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party2 -> Filters(
Some(InclusiveFilters(templateFilters = Set(template1Filter), interfaceFilters = Set()))
Some(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(),
templateWildcardFilter = None,
)
)
)
)
)
@ -153,7 +181,13 @@ class IndexServiceImplSpec
filtersByParty = Map.empty,
filtersForAnyParty = Some(
Filters(
Some(InclusiveFilters(templateFilters = Set(template1Filter), interfaceFilters = Set()))
Some(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(),
templateWildcardFilter = None,
)
)
)
),
)
@ -166,9 +200,44 @@ class IndexServiceImplSpec
TransactionFilter(filtersByParty = Map(party -> Filters(None)))
) shouldBe Some(Set(party))
wildcardFilter(
TransactionFilter(filtersByParty =
Map(
party -> Filters(
Some(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set.empty,
templateWildcardFilter =
Some(TemplateWildcardFilter(includeCreatedEventBlob = false)),
)
)
)
)
)
) shouldBe Some(Set(party))
wildcardFilter(
TransactionFilter(filtersByParty = Map.empty, filtersForAnyParty = Some(Filters(None)))
) shouldBe None
wildcardFilter(
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(
Filters(
Some(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set.empty,
templateWildcardFilter =
Some(TemplateWildcardFilter(includeCreatedEventBlob = false)),
)
)
)
),
)
) shouldBe None
}
it should "support multiple template-wildcard filters" in new Scope {
@ -191,7 +260,13 @@ class IndexServiceImplSpec
filtersByParty = Map(
party -> Filters(None),
party2 -> Filters(
Some(InclusiveFilters(templateFilters = Set(template1Filter), interfaceFilters = Set()))
Some(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(),
templateWildcardFilter = None,
)
)
),
)
)
@ -214,7 +289,13 @@ class IndexServiceImplSpec
filtersByParty = Map(
party -> Filters(None),
party2 -> Filters(
Some(InclusiveFilters(templateFilters = Set(template1Filter), interfaceFilters = Set()))
Some(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(),
templateWildcardFilter = None,
)
)
),
),
filtersForAnyParty = Some(Filters(None)),
@ -224,13 +305,15 @@ class IndexServiceImplSpec
it should "be treated as wildcard filter if templateIds and interfaceIds are empty" in new Scope {
wildcardFilter(
TransactionFilter(filtersByParty = Map(party -> Filters(InclusiveFilters(Set(), Set()))))
TransactionFilter(filtersByParty =
Map(party -> Filters(CumulativeFilter(Set(), Set(), None)))
)
) shouldBe Some(Set(party))
wildcardFilter(
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(Filters(InclusiveFilters(Set(), Set()))),
filtersForAnyParty = Some(Filters(CumulativeFilter(Set(), Set(), None))),
)
) shouldBe None
}
@ -282,7 +365,13 @@ class IndexServiceImplSpec
filtersByParty = Map(
party -> Filters(None),
party2 -> Filters(
Some(InclusiveFilters(templateFilters = Set(template1Filter), interfaceFilters = Set()))
Some(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(),
templateWildcardFilter = None,
)
)
),
)
),
@ -295,7 +384,13 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party2 -> Filters(
Some(InclusiveFilters(templateFilters = Set(template1Filter), interfaceFilters = Set()))
Some(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(),
templateWildcardFilter = None,
)
)
)
),
filtersForAnyParty = Some(Filters(None)),
@ -308,14 +403,16 @@ class IndexServiceImplSpec
it should "ignore template-wildcard filter of the shape where templateIds and interfaceIds are empty" in new Scope {
templateFilter(
PackageMetadata(),
TransactionFilter(filtersByParty = Map(party -> Filters(InclusiveFilters(Set(), Set())))),
TransactionFilter(filtersByParty =
Map(party -> Filters(CumulativeFilter(Set(), Set(), None)))
),
) shouldBe Map.empty
templateFilter(
PackageMetadata(),
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(Filters(InclusiveFilters(Set(), Set()))),
filtersForAnyParty = Some(Filters(CumulativeFilter(Set(), Set(), None))),
),
) shouldBe Map.empty
}
@ -324,7 +421,7 @@ class IndexServiceImplSpec
templateFilter(
PackageMetadata(),
TransactionFilter(filtersByParty =
Map(party -> Filters(InclusiveFilters(Set(template1Filter), Set())))
Map(party -> Filters(CumulativeFilter(Set(template1Filter), Set(), None)))
),
) shouldBe Map(template1 -> Some(Set(party)))
@ -332,7 +429,7 @@ class IndexServiceImplSpec
PackageMetadata(),
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(Filters(InclusiveFilters(Set(template1Filter), Set()))),
filtersForAnyParty = Some(Filters(CumulativeFilter(Set(template1Filter), Set(), None))),
),
) shouldBe Map(template1 -> None)
}
@ -341,7 +438,7 @@ class IndexServiceImplSpec
templateFilter(
PackageMetadata(),
TransactionFilter(
filtersByParty = Map(party -> Filters(InclusiveFilters(Set(), Set(iface1Filter))))
filtersByParty = Map(party -> Filters(CumulativeFilter(Set(), Set(iface1Filter), None)))
),
) shouldBe Map.empty
@ -349,7 +446,7 @@ class IndexServiceImplSpec
PackageMetadata(),
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(Filters(InclusiveFilters(Set(), Set(iface1Filter)))),
filtersForAnyParty = Some(Filters(CumulativeFilter(Set(), Set(iface1Filter), None))),
),
) shouldBe Map.empty
}
@ -358,7 +455,7 @@ class IndexServiceImplSpec
templateFilter(
PackageMetadata(interfacesImplementedBy = Map(iface1 -> Set(template1))),
TransactionFilter(
filtersByParty = Map(party -> Filters(InclusiveFilters(Set(), Set(iface1Filter))))
filtersByParty = Map(party -> Filters(CumulativeFilter(Set(), Set(iface1Filter), None)))
),
) shouldBe Map(template1 -> Some(Set(party)))
@ -366,7 +463,7 @@ class IndexServiceImplSpec
PackageMetadata(interfacesImplementedBy = Map(iface1 -> Set(template1))),
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(Filters(InclusiveFilters(Set(), Set(iface1Filter)))),
filtersForAnyParty = Some(Filters(CumulativeFilter(Set(), Set(iface1Filter), None))),
),
) shouldBe Map(template1 -> None)
}
@ -377,7 +474,7 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party -> Filters(
InclusiveFilters(Set(template1Filter), Set(iface1Filter))
CumulativeFilter(Set(template1Filter), Set(iface1Filter), None)
)
)
),
@ -389,7 +486,7 @@ class IndexServiceImplSpec
filtersByParty = Map.empty,
filtersForAnyParty = Some(
Filters(
InclusiveFilters(Set(template1Filter), Set(iface1Filter))
CumulativeFilter(Set(template1Filter), Set(iface1Filter), None)
)
),
),
@ -405,12 +502,13 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party -> Filters(
InclusiveFilters(
CumulativeFilter(
templateFilters = Set(TemplateFilter(template3, false)),
interfaceFilters = Set(
iface1Filter,
iface2Filter,
),
templateWildcardFilter = None,
)
)
)
@ -430,21 +528,23 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party -> Filters(
InclusiveFilters(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
iface1Filter
),
templateWildcardFilter = None,
)
)
),
filtersForAnyParty = Some(
Filters(
InclusiveFilters(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
iface2Filter
),
templateWildcardFilter = None,
)
)
),
@ -461,21 +561,23 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party -> Filters(
InclusiveFilters(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
iface1Filter
),
templateWildcardFilter = None,
)
)
),
filtersForAnyParty = Some(
Filters(
InclusiveFilters(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
iface1Filter
),
templateWildcardFilter = None,
)
)
),
@ -495,7 +597,7 @@ class IndexServiceImplSpec
}
it should "return an unknown template for not known template" in new Scope {
val filters = Filters(InclusiveFilters(Set(template1Filter), Set()))
val filters = Filters(CumulativeFilter(Set(template1Filter), Set(), None))
checkUnknownIdentifiers(
TransactionFilter(filtersByParty = Map(party -> filters)),
@ -516,7 +618,7 @@ class IndexServiceImplSpec
}
it should "return an unknown interface for not known interface" in new Scope {
val filters = Filters(InclusiveFilters(Set(), Set(iface1Filter)))
val filters = Filters(CumulativeFilter(Set(), Set(iface1Filter), None))
checkUnknownIdentifiers(
TransactionFilter(filtersByParty = Map(party -> filters)),
@ -538,9 +640,10 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party -> Filters(
InclusiveFilters(
Set(template1Filter, packageNameScopedTemplateFilter),
Set(iface1Filter),
CumulativeFilter(
templateFilters = Set(template1Filter, packageNameScopedTemplateFilter),
interfaceFilters = Set(iface1Filter),
templateWildcardFilter = None,
)
)
)
@ -563,9 +666,10 @@ class IndexServiceImplSpec
TransactionFilter(
filtersByParty = Map(
party -> Filters(
InclusiveFilters(
Set(template1Filter, unknownTemplateRefFilter),
Set(iface1Filter),
CumulativeFilter(
templateFilters = Set(template1Filter, unknownTemplateRefFilter),
interfaceFilters = Set(iface1Filter),
templateWildcardFilter = None,
)
)
)
@ -584,9 +688,10 @@ class IndexServiceImplSpec
it should "succeed for all query filter identifiers known" in new Scope {
val filters = Filters(
InclusiveFilters(
Set(template1Filter, packageNameScopedTemplateFilter),
Set(iface1Filter),
CumulativeFilter(
templateFilters = Set(template1Filter, packageNameScopedTemplateFilter),
interfaceFilters = Set(iface1Filter),
templateWildcardFilter = None,
)
)
@ -618,9 +723,9 @@ class IndexServiceImplSpec
checkUnknownIdentifiers(
TransactionFilter(
filtersByParty = Map(
party -> Filters(InclusiveFilters(Set(template1Filter), Set(iface1Filter))),
party -> Filters(CumulativeFilter(Set(template1Filter), Set(iface1Filter), None)),
party2 -> Filters(
InclusiveFilters(Set(template2Filter, template3Filter), Set(iface2Filter))
CumulativeFilter(Set(template2Filter, template3Filter), Set(iface2Filter), None)
),
)
),

View File

@ -6,10 +6,11 @@ package com.digitalasset.canton.platform.store.dao
import com.daml.lf.data.Ref
import com.daml.lf.data.Ref.{Identifier, Party, TypeConRef}
import com.digitalasset.canton.ledger.api.domain.{
CumulativeFilter,
Filters,
InclusiveFilters,
InterfaceFilter,
TemplateFilter,
TemplateWildcardFilter,
TransactionFilter,
}
import com.digitalasset.canton.platform.store.dao.EventProjectionProperties.Projection
@ -50,7 +51,7 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project nothing in case of empty witnesses" in new Scope {
EventProjectionProperties(
transactionFilter = templateWildcardFilter,
transactionFilter = templateWildcardFilter(),
verbose = true,
interfaceImplementedBy = interfaceImpl,
resolveTemplateIds = noTemplatesForPackageName,
@ -59,7 +60,7 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
.render(Set.empty, id) shouldBe Projection(Set.empty, false, false)
EventProjectionProperties(
transactionFilter = templateWildcardPartyWildcardFilter,
transactionFilter = templateWildcardPartyWildcardFilter(),
verbose = true,
interfaceImplementedBy = interfaceImpl,
resolveTemplateIds = noTemplatesForPackageName,
@ -87,27 +88,28 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
private val transactionFilter = TransactionFilter(
filtersByParty = Map(
party -> Filters(
InclusiveFilters(
Set(TemplateFilter(template1, false)),
Set.empty,
CumulativeFilter(
templateFilters = Set(TemplateFilter(template1, false)),
interfaceFilters = Set.empty,
templateWildcardFilter = None,
)
),
party2 -> Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
false,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
),
party3 -> Filters.noFilter,
),
filtersForAnyParty = Some(Filters.noFilter),
alwaysPopulateCreatedEventBlob = true,
filtersForAnyParty = Some(Filters.templateWildcardFilter(true)),
)
val testee = EventProjectionProperties(
transactionFilter = transactionFilter,
@ -126,24 +128,26 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob and contractArguments in case of match by interface, template-id and" +
"package-name-scoped template when interface filters are defined with party-wildcard and template filters by party" in new Scope {
private val templateFilters = Filters(
InclusiveFilters(
Set(
CumulativeFilter(
templateFilters = Set(
template1Filter.copy(includeCreatedEventBlob = true),
TemplateFilter(packageNameScopedTemplate, includeCreatedEventBlob = false),
),
Set.empty,
interfaceFilters = Set.empty,
templateWildcardFilter = None,
)
)
private val interfaceFilters = Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
interfaceId = iface1,
includeView = false,
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
@ -181,24 +185,26 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob and contractArguments in case of match by interface, template-id and" +
"package-name-scoped template when template filters are defined with party-wildcard and interface filters by party" in new Scope {
private val templateFilters = Filters(
InclusiveFilters(
Set(
CumulativeFilter(
templateFilters = Set(
template1Filter.copy(includeCreatedEventBlob = true),
TemplateFilter(packageNameScopedTemplate, includeCreatedEventBlob = false),
),
Set.empty,
interfaceFilters = Set.empty,
templateWildcardFilter = None,
)
)
private val interfaceFilters = Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
interfaceId = iface1,
includeView = false,
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
private val transactionFilter =
@ -234,23 +240,25 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob and interface in case of match by interface and template when filters exist in party-wildcard and by party" in new Scope {
private val templateFilters = Filters(
InclusiveFilters(
Set(
CumulativeFilter(
templateFilters = Set(
template1Filter.copy(includeCreatedEventBlob = true)
),
Set.empty,
interfaceFilters = Set.empty,
templateWildcardFilter = None,
)
)
private val interfaceFilters = Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
interfaceId = iface1,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
@ -407,7 +415,8 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project contract arguments in case of template-wildcard match" ++ details in new Scope {
private val eventProjectionProperties = EventProjectionProperties(
transactionFilter =
if (withPartyWildcard) templateWildcardPartyWildcardFilter else templateWildcardFilter,
if (withPartyWildcard) templateWildcardPartyWildcardFilter()
else templateWildcardFilter(),
verbose = true,
interfaceImplementedBy = noInterface,
resolveTemplateIds = noTemplatesForPackageName,
@ -423,10 +432,10 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
) shouldBe Projection(Set.empty, false, withPartyWildcard)
}
it should "project contract arguments in case of empty InclusiveFilters" ++ details in new Scope {
it should "project contract arguments in case of empty CumulativeFilters" ++ details in new Scope {
private val eventProjectionProperties = EventProjectionProperties(
transactionFilter =
if (withPartyWildcard) emptyInclusivePartyWildcardFilters else emptyInclusiveFilters,
if (withPartyWildcard) emptyCumulativePartyWildcardFilters else emptyCumulativeFilters,
verbose = true,
interfaceImplementedBy = noInterface,
resolveTemplateIds = noTemplatesForPackageName,
@ -443,19 +452,27 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
}
it should "project contract arguments with template-wildcard and another filter" ++ details in new Scope {
private val filters = Filters(Some(InclusiveFilters(Set.empty, Set.empty)))
private val filters = Filters(
Some(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set.empty,
templateWildcardFilter = Some(TemplateWildcardFilter(includeCreatedEventBlob = false)),
)
)
)
private val transactionFilter = withPartyWildcard match {
case false =>
TransactionFilter(
Map(
party -> filters,
party2 -> Filters(Some(InclusiveFilters(Set(template1Filter), Set.empty))),
party2 -> Filters(Some(CumulativeFilter(Set(template1Filter), Set.empty, None))),
)
)
case true =>
TransactionFilter(
filtersByParty = Map(
party2 -> Filters(Some(InclusiveFilters(Set(template1Filter), Set.empty)))
party2 -> Filters(Some(CumulativeFilter(Set(template1Filter), Set.empty, None)))
),
filtersForAnyParty = Some(filters),
)
@ -485,9 +502,12 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project contract arguments if interface filter and package-name scope template filter" ++ details in new Scope {
private val filters: Filters = Filters(
Some(
InclusiveFilters(
Set(TemplateFilter(packageNameScopedTemplate, includeCreatedEventBlob = false)),
Set(InterfaceFilter(iface1, includeView = true, includeCreatedEventBlob = false)),
CumulativeFilter(
templateFilters =
Set(TemplateFilter(packageNameScopedTemplate, includeCreatedEventBlob = false)),
interfaceFilters =
Set(InterfaceFilter(iface1, includeView = true, includeCreatedEventBlob = false)),
templateWildcardFilter = None,
)
)
)
@ -522,19 +542,19 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project contract arguments with template-wildcard and another filter with alwaysPopulateArguments, " +
"if queried non template-wildcard party/template combination" ++ details in new Scope {
private val filters: Filters = Filters(Some(InclusiveFilters(Set.empty, Set.empty)))
private val filters: Filters = Filters(Some(CumulativeFilter(Set.empty, Set.empty, None)))
private val transactionFilter = withPartyWildcard match {
case false =>
TransactionFilter(filtersByParty =
Map(
party -> filters,
party2 -> Filters(Some(InclusiveFilters(Set(template1Filter), Set.empty))),
party2 -> Filters(Some(CumulativeFilter(Set(template1Filter), Set.empty, None))),
)
)
case true =>
TransactionFilter(
filtersByParty = Map(
party2 -> Filters(Some(InclusiveFilters(Set(template1Filter), Set.empty)))
party2 -> Filters(Some(CumulativeFilter(Set(template1Filter), Set.empty, None)))
),
filtersForAnyParty = Some(filters),
)
@ -574,15 +594,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project interface in case of match by interface id and witness" ++ details in new Scope {
private val filters = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
@ -620,15 +641,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project interface in case of match by interface id and witness with alwaysPopulateArguments" ++ details in new Scope {
private val filters = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
@ -666,15 +688,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "not project interface in case of match by interface id and witness" ++ details in new Scope {
private val filters = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = false,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
@ -708,15 +731,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "not project interface in case of match by interface id but not witness with alwaysPopulateArguments" ++ details in new Scope {
private val filters = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
@ -748,15 +772,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project an interface and template in case of match by interface id, template and witness" ++ details in new Scope {
private val filters = Filters(
Some(
InclusiveFilters(
Set(template1Filter),
Set(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
@ -788,15 +813,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project an interface and template in case of match by interface id, template and witness with alwaysPopulateArguments" ++ details in new Scope {
private val filters = Filters(
Some(
InclusiveFilters(
Set(template1Filter),
Set(
CumulativeFilter(
templateFilters = Set(template1Filter),
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
@ -828,9 +854,9 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project multiple interfaces in case of match by multiple interface ids and witness" ++ details in new Scope {
private val filters = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
@ -842,6 +868,7 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
includeCreatedEventBlob = false,
),
),
templateWildcardFilter = None,
)
)
)
@ -880,29 +907,31 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project multiple interfaces in case of match by multiple interface ids and witness when combined with party-wildcard" in new Scope {
private val filter1 = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
private val filter2 = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface2,
includeView = true,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = None,
)
)
)
@ -930,9 +959,9 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "deduplicate projected interfaces and include the view" ++ details in new Scope {
private val filter1 = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = false,
@ -944,14 +973,15 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
includeCreatedEventBlob = false,
),
),
templateWildcardFilter = None,
)
)
)
private val filter2 = Filters(
Some(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
includeView = true,
@ -963,6 +993,7 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
includeCreatedEventBlob = false,
),
),
templateWildcardFilter = None,
)
)
)
@ -1005,15 +1036,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob in case of match by interface" ++ details in new Scope {
private val filters = Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
interfaceId = iface1,
includeView = false,
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1051,20 +1083,70 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
)
}
it should "project created_event_blob in case of match by template-wildcard" ++ details in new Scope {
private val filters = Filters(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
interfaceId = iface1,
includeView = false,
includeCreatedEventBlob = false,
)
),
templateWildcardFilter = Some(TemplateWildcardFilter(includeCreatedEventBlob = true)),
)
)
private val transactionFilter = withPartyWildcard match {
case false =>
TransactionFilter(filtersByParty = Map(party -> filters))
case true =>
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(filters),
)
}
private val eventProjectionProperties = EventProjectionProperties(
transactionFilter = transactionFilter,
verbose = true,
interfaceImplementedBy = interfaceImpl,
resolveTemplateIds = noTemplatesForPackageName,
alwaysPopulateArguments = false,
)
eventProjectionProperties.render(
Set(party),
template1,
) shouldBe Projection(
interfaces = Set.empty,
createdEventBlob = true,
contractArguments = true,
)
eventProjectionProperties.render(
Set(party2),
template1,
) shouldBe Projection(
interfaces = Set.empty,
createdEventBlob = withPartyWildcard,
contractArguments = withPartyWildcard,
)
}
it should "project created_event_blob in case of match by interface, template-id and package-name-scoped template" ++ details in new Scope {
private val filters = Filters(
InclusiveFilters(
Set(
CumulativeFilter(
templateFilters = Set(
template1Filter.copy(includeCreatedEventBlob = true),
TemplateFilter(packageNameScopedTemplate, includeCreatedEventBlob = false),
),
Set(
interfaceFilters = Set(
InterfaceFilter(
interfaceId = iface1,
includeView = false,
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1103,9 +1185,10 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob in case of match by interface and template with include the view" ++ details in new Scope {
private val filters = Filters(
InclusiveFilters(
Set(template1Filter.copy(includeCreatedEventBlob = true)),
Set(InterfaceFilter(iface1, true, true)),
CumulativeFilter(
templateFilters = Set(template1Filter.copy(includeCreatedEventBlob = true)),
interfaceFilters = Set(InterfaceFilter(iface1, true, true)),
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1146,9 +1229,9 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob in case of at least a single interface requesting it" ++ details in new Scope {
private val filters = Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
false,
@ -1160,6 +1243,7 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
includeCreatedEventBlob = false,
),
),
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1190,13 +1274,15 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
val template2Filter: TemplateFilter =
TemplateFilter(templateId = template2, includeCreatedEventBlob = false)
private val filters = Filters(
InclusiveFilters(
Set(
CumulativeFilter(
templateFilters = Set(
template1Filter,
template2Filter,
TemplateFilter(packageNameScopedTemplate, includeCreatedEventBlob = true),
),
Set(InterfaceFilter(iface1, false, false), InterfaceFilter(iface2, false, false)),
interfaceFilters =
Set(InterfaceFilter(iface1, false, false), InterfaceFilter(iface2, false, false)),
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1236,15 +1322,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "not project created_event_blob in case of no match by interface" ++ details in new Scope {
private val filters = Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
false,
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1271,15 +1358,16 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob for wildcard templates, if it is specified explicitly via interface filter" ++ details in new Scope {
private val filters = Filters(
InclusiveFilters(
Set.empty,
Set(
CumulativeFilter(
templateFilters = Set.empty,
interfaceFilters = Set(
InterfaceFilter(
iface1,
false,
includeCreatedEventBlob = true,
)
),
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1311,9 +1399,10 @@ class EventProjectionPropertiesSpec extends AnyFlatSpec with Matchers {
it should "project created_event_blob for wildcard templates, if it is specified explicitly via template filter" ++ details in new Scope {
private val filters = Filters(
InclusiveFilters(
Set(TemplateFilter(template1, true)),
Set.empty,
CumulativeFilter(
templateFilters = Set(TemplateFilter(template1, true)),
interfaceFilters = Set.empty,
templateWildcardFilter = None,
)
)
private val transactionFilter = withPartyWildcard match {
@ -1386,20 +1475,43 @@ object EventProjectionPropertiesSpec {
val party2: Party = Party.assertFromString("party2")
val party3: Party = Party.assertFromString("party3")
val noFilter = TransactionFilter(Map())
val templateWildcardFilter = TransactionFilter(Map(party -> Filters(None)))
val templateWildcardPartyWildcardFilter = TransactionFilter(
def templateWildcardFilter(includeCreatedEventBlob: Boolean = false) = TransactionFilter(
filtersByParty = Map(
party -> Filters(
Some(
CumulativeFilter(
Set.empty,
Set.empty,
Some(TemplateWildcardFilter(includeCreatedEventBlob = includeCreatedEventBlob)),
)
)
)
)
)
def templateWildcardPartyWildcardFilter(includeCreatedEventBlob: Boolean = false) =
TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(
Filters(
Some(
CumulativeFilter(
Set.empty,
Set.empty,
Some(TemplateWildcardFilter(includeCreatedEventBlob = includeCreatedEventBlob)),
)
)
)
),
)
val emptyCumulativeFilters = TransactionFilter(
Map(party -> Filters(Some(CumulativeFilter(Set.empty, Set.empty, None))))
)
val emptyCumulativePartyWildcardFilters = TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(Filters(None)),
filtersForAnyParty = Some(Filters(Some(CumulativeFilter(Set.empty, Set.empty, None)))),
)
val emptyInclusiveFilters = TransactionFilter(
Map(party -> Filters(Some(InclusiveFilters(Set.empty, Set.empty))))
)
val emptyInclusivePartyWildcardFilters = TransactionFilter(
filtersByParty = Map.empty,
filtersForAnyParty = Some(Filters(Some(InclusiveFilters(Set.empty, Set.empty)))),
)
def templateFilterFor(templateTypeRef: Ref.TypeConRef): Option[InclusiveFilters] = Some(
InclusiveFilters(Set(TemplateFilter(templateTypeRef, false)), Set.empty)
def templateFilterFor(templateTypeRef: Ref.TypeConRef): Option[CumulativeFilter] = Some(
CumulativeFilter(Set(TemplateFilter(templateTypeRef, false)), Set.empty, None)
)
}
}
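Not part of the diff: a minimal sketch of the filter shapes these helpers now build, mirroring the domain constructors used in the Scope above (Filters wrapping an optional CumulativeFilter with templateFilters, interfaceFilters and an optional templateWildcardFilter). `party`, `template1` and the surrounding imports are assumed to be the fixtures already defined in this spec.

// Illustrative only: a plain template subscription for one party ...
val templateOnlyFilter: TransactionFilter =
  TransactionFilter(
    filtersByParty = Map(
      party -> Filters(
        Some(
          CumulativeFilter(
            templateFilters = Set(TemplateFilter(template1, includeCreatedEventBlob = false)),
            interfaceFilters = Set.empty,
            templateWildcardFilter = None,
          )
        )
      )
    )
  )

// ... and a party-wildcard, template-wildcard subscription, which now goes through filtersForAnyParty.
val anyPartyWildcardFilter: TransactionFilter =
  TransactionFilter(
    filtersByParty = Map.empty,
    filtersForAnyParty = Some(
      Filters(
        Some(
          CumulativeFilter(
            Set.empty,
            Set.empty,
            Some(TemplateWildcardFilter(includeCreatedEventBlob = false)),
          )
        )
      )
    ),
  )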

View File

@ -17,7 +17,6 @@ import com.daml.lf.value.Value as LfValue
import com.digitalasset.canton.data.Offset
import com.digitalasset.canton.ledger.api.domain.TemplateFilter
import com.digitalasset.canton.ledger.participant.state
import com.digitalasset.canton.ledger.participant.state.index
import com.digitalasset.canton.logging.LoggingContextWithTrace
import com.digitalasset.canton.platform.store.entries.LedgerEntry
import com.digitalasset.canton.testing.utils.TestModels
@ -53,18 +52,13 @@ private[dao] trait JdbcLedgerDaoSuite extends JdbcLedgerDaoBackend with OptionVa
def toLong: Long = BigInt(offset.toByteArray).toLong
}
private val now = Timestamp.now()
private[this] lazy val dar =
TestModels.com_daml_ledger_test_ModelTestDar_path
.pipe(JarResourceUtils.resourceFileFromJar)
.pipe(DarParser.assertReadArchiveFromFile)
protected final lazy val packages: List[(DamlLf.Archive, index.PackageDetails)] =
dar.all.map(dar => dar -> index.PackageDetails(dar.getSerializedSize.toLong, now, None))
protected final lazy val packageMap =
packages.map { case (archive, _) => archive.getHash -> archive }.toMap
dar.all.map { archive => archive.getHash -> archive }.toMap
private val testPackageId: Ref.PackageId = Ref.PackageId.assertFromString(dar.main.getHash)
override def loadPackage: PackageId => Future[Option[DamlLf.Archive]] = pkgId =>

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --enable-interfaces=yes
name: carbonv1-tests

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --enable-interfaces=yes
name: carbonv2-tests

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
name: experimental-tests
source: .
version: 3.1.0

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --enable-interfaces=yes
name: model-tests

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
name: package-management-tests
source: .
version: 3.1.0

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --enable-interfaces=yes
name: semantic-tests

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
name: upgrade-tests
source: .
version: 1.0.0

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
name: upgrade-tests
source: .
version: 2.0.0

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
name: upgrade-tests
source: .
version: 3.0.0

View File

@ -8,9 +8,10 @@ import org.apache.pekko.stream.scaladsl.{Broadcast, Concat, Flow, GraphDSL, Sour
import org.apache.pekko.stream.{FanOutShape2, Graph}
import com.digitalasset.canton.fetchcontracts.util.GraphExtensions.*
import com.digitalasset.canton.fetchcontracts.util.IdentifierConverters.apiIdentifier
import com.daml.ledger.api.v2.transaction_filter.TemplateFilter
import com.daml.ledger.api.v2.transaction_filter.{CumulativeFilter, TemplateFilter}
import com.daml.ledger.api.v2 as lav2
import com.daml.ledger.api.v2.transaction.Transaction
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.scalautil.Statement.discard
import com.digitalasset.canton.http.domain.{ContractTypeId, ResolvedQuery}
import com.digitalasset.canton.logging.TracedLogger
@ -152,26 +153,33 @@ object AcsTxStreams extends NoTracing {
parties: domain.PartySet,
contractTypeIds: List[ContractTypeId.Resolved],
): lav2.transaction_filter.TransactionFilter = {
import lav2.transaction_filter.{Filters, InterfaceFilter, InclusiveFilters}
import lav2.transaction_filter.{Filters, InterfaceFilter}
val (templateIds, interfaceIds) = ResolvedQuery.partition(contractTypeIds)
val filters = Filters(
Some(
InclusiveFilters(
templateFilters = templateIds.map(templateId =>
TemplateFilter(
templateId = Some(apiIdentifier(templateId)),
includeCreatedEventBlob = false,
templateIds
.map(templateId =>
CumulativeFilter(
IdentifierFilter.TemplateFilter(
TemplateFilter(
templateId = Some(apiIdentifier(templateId)),
includeCreatedEventBlob = false,
)
)
),
interfaceFilters = interfaceIds.map(interfaceId =>
InterfaceFilter(
interfaceId = Some(apiIdentifier(interfaceId)),
includeInterfaceView = true,
)
),
)
)
)
++
interfaceIds
.map(interfaceId =>
CumulativeFilter(
IdentifierFilter.InterfaceFilter(
InterfaceFilter(
interfaceId = Some(apiIdentifier(interfaceId)),
includeInterfaceView = true,
)
)
)
)
)
lav2.transaction_filter.TransactionFilter(

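Not part of the diff: a minimal sketch of the pattern this hunk migrates to, assuming the Ledger API v2 proto classes imported above. Instead of a single InclusiveFilters carrying sets of template and interface filters, every resolved identifier now becomes its own CumulativeFilter wrapping an IdentifierFilter; the helper name and the identifier type are illustrative only.

import com.daml.ledger.api.v2 as lav2
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.{CumulativeFilter, Filters, InterfaceFilter, TemplateFilter}

// Illustrative only: one CumulativeFilter per template id, one per interface id.
def filtersFor(
    templateIds: Seq[lav2.value.Identifier],
    interfaceIds: Seq[lav2.value.Identifier],
): Filters =
  Filters(
    templateIds.map(id =>
      CumulativeFilter(
        IdentifierFilter.TemplateFilter(TemplateFilter(Some(id), includeCreatedEventBlob = false))
      )
    ) ++
      interfaceIds.map(id =>
        CumulativeFilter(
          IdentifierFilter.InterfaceFilter(InterfaceFilter(Some(id), includeInterfaceView = true))
        )
      )
  )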
View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: JsonEncodingTest

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.dev
name: JsonEncodingTestDev

View File

@ -1,4 +1,4 @@
sdk-version: 3.1.0-snapshot.20240604.13114.0.v91c1c216
sdk-version: 3.1.0-snapshot.20240605.13117.0.v37969fdd
build-options:
- --target=2.1
name: AdminWorkflows

View File

@ -563,8 +563,7 @@ class ParticipantNodeBootstrap(
hashOps = syncCrypto.pureCrypto,
loggerFactory = loggerFactory,
metrics = arguments.metrics,
packageMetadataViewConfig =
config.parameters.ledgerApiServer.indexer.packageMetadataView,
packageMetadataViewConfig = config.parameters.packageMetadataView,
packageOps = createPackageOps(syncDomainPersistentStateManager),
timeouts = parameterConfig.processingTimeouts,
)

View File

@ -20,7 +20,7 @@ import com.digitalasset.canton.LedgerSubmissionId
import com.digitalasset.canton.concurrent.FutureSupervisor
import com.digitalasset.canton.config.CantonRequireTypes.LengthLimitedString.DarName
import com.digitalasset.canton.config.CantonRequireTypes.String255
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.config.{PackageMetadataViewConfig, ProcessingTimeout}
import com.digitalasset.canton.crypto.{Hash, HashOps}
import com.digitalasset.canton.error.CantonError
import com.digitalasset.canton.ledger.error.PackageServiceErrors
@ -40,7 +40,6 @@ import com.digitalasset.canton.participant.store.memory.{
MutablePackageMetadataViewImpl,
PackageMetadataView,
}
import com.digitalasset.canton.platform.indexer.PackageMetadataViewConfig
import com.digitalasset.canton.platform.packages.DeduplicatingPackageLoader
import com.digitalasset.canton.protocol.{PackageDescription, PackageInfoService}
import com.digitalasset.canton.time.Clock
@ -79,7 +78,6 @@ class PackageService(
val packageDependencyResolver: PackageDependencyResolver,
protected val loggerFactory: NamedLoggerFactory,
metrics: ParticipantMetrics,
// TODO(#17635): wire PackageMetadataView to be used in the Ledger API instead of the existing one
val packageMetadataView: PackageMetadataView,
packageOps: PackageOps,
packageUploader: PackageUploader,

View File

@ -12,9 +12,10 @@ import com.daml.lf.data.Ref
import com.daml.lf.engine.Engine
import com.daml.lf.language.Ast
import com.digitalasset.canton.concurrent.FutureSupervisor
import com.digitalasset.canton.config.CantonRequireTypes.{String255, String256M}
import com.digitalasset.canton.config.CantonRequireTypes.String255
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.crypto.{Hash, HashOps, HashPurpose}
import com.digitalasset.canton.data.CantonTimestamp
import com.digitalasset.canton.ledger.error.PackageServiceErrors
import com.digitalasset.canton.lifecycle.{
FlagCloseable,
@ -101,39 +102,29 @@ class PackageUploader(
val darNameO =
fileNameO.map(fn => PathUtils.getFilenameWithoutExtension(Paths.get(fn).getFileName))
val hash = hashOps.digest(HashPurpose.DarIdentifier, darPayload)
for {
lengthValidatedNameO <- darNameO.traverse(darName =>
EitherT
.fromEither[FutureUnlessShutdown](
String255.create(darName, Some("DAR file name"))
)
.fromEither[FutureUnlessShutdown](String255.create(darName))
.leftMap(PackageServiceErrors.Reading.InvalidDarFileName.Error(_))
)
// TODO(#17635): Make DAR descriptor mandatory and always persist DAR payload
// Currently not done so if request coming from the Ledger API
darDescriptorO = lengthValidatedNameO.map(lengthValidatedName =>
Dar(DarDescriptor(hash, lengthValidatedName), darPayload.toByteArray)
)
dar <- readDarFromPayload(darPayload, darNameO)
sourceDescription = lengthValidatedNameO.getOrElse(
String255("package source description")()
)
_ = logger.debug(
s"Processing package upload of ${dar.all.length} packages from source $sourceDescription"
s"Processing package upload of ${dar.all.length} packages${darNameO
.fold("")(n => s" from $n")} for submissionId $submissionId"
)
mainPackage <- catchUpstreamErrors(Decode.decodeArchive(dar.main)).map(dar.main -> _)
dependencies <- dar.dependencies.parTraverse(archive =>
catchUpstreamErrors(Decode.decodeArchive(archive)).map(archive -> _)
)
_ <- EitherT(
hash <- EitherT(
uploadDarExecutionQueue.executeUnderFailuresUS(
uploadDarSequentialStep(
darO = darDescriptorO,
darPayload = darPayload,
mainPackage = mainPackage,
dependencies = dependencies,
// TODO(#17635): Source description only needed for package upload ledger sync events (which will be removed)
sourceDescription = sourceDescription.asString1GB,
// TODO(#17635): Allow more generic source descriptions or rename source description to DAR name
lengthValidatedDarName = lengthValidatedNameO,
submissionId = submissionId,
),
description = "store DAR",
@ -146,19 +137,23 @@ class PackageUploader(
// that a package validation against the current package metadata view
// is happening concurrently with an update of the package metadata view.
private def uploadDarSequentialStep(
darO: Option[Dar],
darPayload: ByteString,
mainPackage: (DamlLf.Archive, (LfPackageId, Ast.Package)),
dependencies: List[(DamlLf.Archive, (LfPackageId, Ast.Package))],
sourceDescription: String256M,
lengthValidatedDarName: Option[String255],
submissionId: LedgerSubmissionId,
)(implicit traceContext: TraceContext): FutureUnlessShutdown[Either[DamlError, Unit]] = {
def persist(allPackages: List[(DamlLf.Archive, (LfPackageId, Ast.Package))]) =
)(implicit traceContext: TraceContext): FutureUnlessShutdown[Either[DamlError, Hash]] = {
def persist(
dar: Dar,
uploadedAt: CantonTimestamp,
allPackages: List[(DamlLf.Archive, (LfPackageId, Ast.Package))],
) =
for {
_ <- packagesDarsStore.append(
pkgs = allPackages.map(_._1),
uploadedAt = clock.monotonicTime(),
sourceDescription = sourceDescription,
dar = darO,
uploadedAt = uploadedAt,
sourceDescription = lengthValidatedDarName.getOrElse(String255.empty),
dar = dar,
)
// update our dependency cache
// we need to do this due to an issue we can hit if we have pre-populated the cache
@ -166,24 +161,35 @@ class PackageUploader(
// now that the package is loaded, we need to get rid of this None.
_ = packageDependencyResolver.clearPackagesNotPreviouslyFound()
_ = logger.debug(
s"Managed to upload one or more archives in submissionId $submissionId and sourceDescription $sourceDescription"
s"Managed to upload one or more archives for submissionId $submissionId"
)
_ = allPackages.foreach { case (_, (pkgId, pkg)) =>
packageMetadataView.update(PackageMetadata.from(pkgId, pkg))
}
} yield ()
validatePackages(mainPackage._2, dependencies.map(_._2)).semiflatMap { _ =>
val allPackages = mainPackage :: dependencies
val result = persist(allPackages)
handleUploadResult(result, submissionId, sourceDescription)
}.value
val uploadTime = clock.monotonicTime()
val hash = hashOps.digest(HashPurpose.DarIdentifier, darPayload)
val persistedDarName =
lengthValidatedDarName.getOrElse(String255.tryCreate(s"DAR_${hash.toHexString}"))
val darDescriptor =
Dar(
DarDescriptor(hash, persistedDarName),
darPayload.toByteArray,
)
validatePackages(mainPackage._2, dependencies.map(_._2))
.semiflatMap { _ =>
val allPackages = mainPackage :: dependencies
val result = persist(darDescriptor, uploadTime, allPackages)
handleUploadResult(result, submissionId)
}
.map(_ => hash)
.value
}
private def handleUploadResult(
res: FutureUnlessShutdown[Unit],
submissionId: LedgerSubmissionId,
sourceDescription: String256M,
)(implicit tc: TraceContext): FutureUnlessShutdown[Unit] =
res.transformWith {
case Success(UnlessShutdown.Outcome(_)) => FutureUnlessShutdown.unit
@ -197,7 +203,7 @@ class PackageUploader(
FutureUnlessShutdown.abortedDueToShutdown
case Failure(e) =>
logger.warn(
s"Failed to upload one or more archives in submissionId $submissionId and sourceDescription $sourceDescription",
s"Failed to upload one or more archives in submissionId $submissionId",
e,
)
// If JDBC insertion call failed, we don't know whether the DB was updated or not
@ -235,7 +241,7 @@ class PackageUploader(
): EitherT[FutureUnlessShutdown, DamlError, LfDar[DamlLf.Archive]] = {
val zipInputStream = new ZipInputStream(darPayload.newInput())
catchUpstreamErrors(
DarParser.readArchive(darNameO.getOrElse("package-upload"), zipInputStream)
DarParser.readArchive(darNameO.getOrElse("unknown-file-name"), zipInputStream)
).thereafter(_ => zipInputStream.close())
}

View File

@ -35,7 +35,7 @@ class GrpcPackageService(
activePackages <- service.listPackages(OptionUtil.zeroAsNone(request.limit))
} yield ListPackagesResponse(activePackages.map {
case protocol.PackageDescription(pid, sourceDescription, _uploadedAt, _size) =>
// TODO(#17635): Extend PB package description definition
// TODO(#17635): Extend PB package description definition to accommodate uploadedAt and size
v30.PackageDescription(pid, sourceDescription.unwrap)
})
}

View File

@ -335,6 +335,7 @@ object TestingTimeServiceConfig {
* @param allowForUnauthenticatedContractIds Skip the contract id authentication check if the contract id scheme does not support authentication.
* You should enable this only if all participants on a domain mutually trust each other.
* Otherwise, an attacker may compromise the integrity of the ledger.
* @param packageMetadataView Initialization parameters for the package metadata in-memory store.
*/
final case class ParticipantNodeParameterConfig(
adminWorkflow: AdminWorkflowConfig = AdminWorkflowConfig(),
@ -365,6 +366,7 @@ final case class ParticipantNodeParameterConfig(
override val useUnifiedSequencer: Boolean = false,
allowForUnauthenticatedContractIds: Boolean = false,
watchdog: Option[WatchdogConfig] = None,
packageMetadataView: PackageMetadataViewConfig = PackageMetadataViewConfig(),
) extends LocalNodeParametersConfig
/** Parameters for the participant node's stores

View File

@ -292,9 +292,8 @@ class StartableStoppableLedgerApiServer(
)(implicit traceContext: TraceContext): Source[GetActiveContractsResponse, NotUsed] =
indexService.getActiveContracts(
filter = TransactionFilter(
filtersByParty = partyIds.view.map(_ -> Filters.noFilter).toMap,
filtersByParty = partyIds.view.map(_ -> Filters.templateWildcardFilter(true)).toMap,
filtersForAnyParty = None,
alwaysPopulateCreatedEventBlob = true,
),
verbose = false,
activeAtO = validAt,

View File

@ -4,9 +4,10 @@
package com.digitalasset.canton.participant.ledger.api.client
import com.daml.grpc.adapter.ExecutionSequencerFactory
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.{
CumulativeFilter,
Filters,
InclusiveFilters,
TemplateFilter,
TransactionFilter,
}
@ -67,10 +68,8 @@ object LedgerConnection {
case (p, Nil) => p.toProtoPrimitive -> Filters.defaultInstance
case (p, ts) =>
p.toProtoPrimitive -> Filters(
Some(
InclusiveFilters(
templateFilters = ts.map(tf => TemplateFilter(Some(tf), false))
)
ts.map(tf =>
CumulativeFilter(IdentifierFilter.TemplateFilter(TemplateFilter(Some(tf), false)))
)
)
})

View File

@ -7,7 +7,7 @@ import cats.data.OptionT
import com.daml.daml_lf_dev.DamlLf
import com.daml.lf.data.Ref.PackageId
import com.digitalasset.canton.concurrent.FutureSupervisor
import com.digitalasset.canton.config.CantonRequireTypes.String256M
import com.digitalasset.canton.config.CantonRequireTypes.String255
import com.digitalasset.canton.crypto.Hash
import com.digitalasset.canton.data.CantonTimestamp
import com.digitalasset.canton.lifecycle.FutureUnlessShutdown
@ -36,8 +36,8 @@ trait DamlPackageStore extends AutoCloseable { this: NamedLogging =>
def append(
pkgs: List[DamlLf.Archive],
uploadedAt: CantonTimestamp,
sourceDescription: String256M,
dar: Option[PackageService.Dar],
sourceDescription: String255,
dar: PackageService.Dar,
)(implicit
traceContext: TraceContext
): FutureUnlessShutdown[Unit]
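Not part of the diff: a sketch of a call site that matches the new signature, in which the DAR payload is mandatory and the source description is a String255. `store`, `archives`, `uploadedAt` and `darBytes` are placeholders, not names from this change.

// Illustrative only.
val darName = String255.tryCreate("CantonExamples")
val dar = PackageService.Dar(
  PackageService.DarDescriptor(TestHash.digest("CantonExamples"), darName),
  darBytes,
)

store.append(
  pkgs = archives,             // List[DamlLf.Archive] decoded from the DAR payload
  uploadedAt = uploadedAt,     // CantonTimestamp of the upload
  sourceDescription = darName, // String255, typically the DAR file name
  dar = dar,                   // no longer an Option
)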

View File

@ -10,8 +10,11 @@ import com.daml.nameof.NameOf.functionFullName
import com.daml.nonempty.NonEmpty
import com.digitalasset.canton.LfPackageId
import com.digitalasset.canton.concurrent.FutureSupervisor
import com.digitalasset.canton.config.CantonRequireTypes.LengthLimitedString.DarName
import com.digitalasset.canton.config.CantonRequireTypes.{LengthLimitedString, String256M}
import com.digitalasset.canton.config.CantonRequireTypes.LengthLimitedString.{
DarName,
setParameterLengthLimitedString,
}
import com.digitalasset.canton.config.CantonRequireTypes.{LengthLimitedString, String255}
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.config.RequireTypes.PositiveNumeric
import com.digitalasset.canton.crypto.Hash
@ -42,9 +45,9 @@ class DbDamlPackageStore(
with DbStore {
import DamlPackageStore.*
import DbStorage.Implicits.*
import storage.api.*
import storage.converters.*
import DbStorage.Implicits.*
// writeQueue is used to protect against concurrent insertions and deletions to/from the `par_dars` or `par_daml_packages` tables,
// which might otherwise cause data corruption or constraint violations.
@ -65,8 +68,8 @@ class DbDamlPackageStore(
private def insertOrUpdatePackages(
pkgs: List[DamlPackage],
darO: Option[DarDescriptor],
sourceDescription: String256M,
dar: DarDescriptor,
sourceDescription: String255,
)(implicit traceContext: TraceContext): DbAction.All[Unit] = {
val insertToDamlPackages = {
val sql = storage.profile match {
@ -115,7 +118,7 @@ class DbDamlPackageStore(
DbStorage.bulkOperation_(sql, pkgs, storage.profile) { pp => pkg =>
pp >> pkg.packageId
pp >> (if (sourceDescription.nonEmpty) sourceDescription
else String256M.tryCreate("default"))
else String255.tryCreate("default"))
pp >> pkg.data
pp >> pkg.uploadedAt
pp >> pkg.packageSize
@ -123,11 +126,10 @@ class DbDamlPackageStore(
}
}
val insertToDarPackages = darO
.map { dar =>
val sql = storage.profile match {
case _: DbStorage.Profile.Oracle =>
"""merge /*+ INDEX ( dar_packages (dar_hash_hex package_id) ) */
val insertToDarPackages = {
val sql = storage.profile match {
case _: DbStorage.Profile.Oracle =>
"""merge /*+ INDEX ( dar_packages (dar_hash_hex package_id) ) */
| into par_dar_packages
| using (
| select
@ -139,19 +141,18 @@ class DbDamlPackageStore(
| when not matched then
| insert (dar_hash_hex, package_id)
| values (excluded.dar_hash_hex, excluded.package_id)""".stripMargin
case _ =>
"""insert into par_dar_packages (dar_hash_hex, package_id)
case _ =>
"""insert into par_dar_packages (dar_hash_hex, package_id)
| values (?, ?)
| on conflict do
| nothing""".stripMargin
}
DbStorage.bulkOperation_(sql, pkgs, storage.profile) { pp => pkg =>
pp >> (dar.hash.toLengthLimitedHexString: LengthLimitedString)
pp >> pkg.packageId
}
}
.getOrElse(DBIO.successful(()))
DbStorage.bulkOperation_(sql, pkgs, storage.profile) { pp => pkg =>
pp >> (dar.hash.toLengthLimitedHexString: LengthLimitedString)
pp >> pkg.packageId
}
}
insertToDamlPackages.andThen(insertToDarPackages)
}
@ -159,8 +160,8 @@ class DbDamlPackageStore(
override def append(
pkgs: List[DamlLf.Archive],
uploadedAt: CantonTimestamp,
sourceDescription: String256M,
dar: Option[PackageService.Dar],
sourceDescription: String255,
dar: PackageService.Dar,
)(implicit
traceContext: TraceContext
): FutureUnlessShutdown[Unit] = {
@ -169,11 +170,11 @@ class DbDamlPackageStore(
pkgs.map(pkg =>
DamlPackage(readPackageId(pkg), pkg.toByteArray, pkg.getPayload.size(), uploadedAt)
),
dar.map(_.descriptor),
dar.descriptor,
sourceDescription,
)
val writeDar: List[WriteOnly[Int]] = dar.map(dar => appendToDarStore(dar)).toList
val writeDar: List[WriteOnly[Int]] = List(appendToDarStore(dar))
// Combine all the operations into a single transaction to avoid partial insertions.
val writeDarAndPackages = DBIO

View File

@ -8,7 +8,7 @@ import cats.data.OptionT
import com.daml.daml_lf_dev.DamlLf
import com.daml.lf.data.Ref.PackageId
import com.digitalasset.canton.LfPackageId
import com.digitalasset.canton.config.CantonRequireTypes.{String255, String256M}
import com.digitalasset.canton.config.CantonRequireTypes.String255
import com.digitalasset.canton.crypto.Hash
import com.digitalasset.canton.data.CantonTimestamp
import com.digitalasset.canton.discard.Implicits.DiscardOps
@ -33,8 +33,8 @@ class InMemoryDamlPackageStore(override protected val loggerFactory: NamedLogger
import DamlPackageStore.*
private val pkgData
: concurrent.Map[LfPackageId, (DamlLf.Archive, String256M, CantonTimestamp, Int)] =
new ConcurrentHashMap[LfPackageId, (DamlLf.Archive, String256M, CantonTimestamp, Int)].asScala
: concurrent.Map[LfPackageId, (DamlLf.Archive, String255, CantonTimestamp, Int)] =
new ConcurrentHashMap[LfPackageId, (DamlLf.Archive, String255, CantonTimestamp, Int)].asScala
private val darData: concurrent.Map[Hash, (Array[Byte], String255)] =
new ConcurrentHashMap[Hash, (Array[Byte], String255)].asScala
@ -45,14 +45,11 @@ class InMemoryDamlPackageStore(override protected val loggerFactory: NamedLogger
override def append(
pkgs: List[DamlLf.Archive],
uploadedAt: CantonTimestamp,
sourceDescription: String256M,
dar: Option[PackageService.Dar],
sourceDescription: String255,
dar: PackageService.Dar,
)(implicit
traceContext: TraceContext
): FutureUnlessShutdown[Unit] = {
val pkgIds = pkgs.map(readPackageId)
pkgs.foreach { pkgArchive =>
val packageId = readPackageId(pkgArchive)
val packageSize = pkgArchive.getPayload.size()
@ -71,12 +68,10 @@ class InMemoryDamlPackageStore(override protected val loggerFactory: NamedLogger
.discard
}
dar.foreach { dar =>
darData.put(dar.descriptor.hash, (dar.bytes.clone(), dar.descriptor.name)).discard
val hash = dar.descriptor.hash
val pkgS = pkgIds.toSet
darPackages.updateWith(hash)(optSet => Some(optSet.fold(pkgS)(_.union(pkgS))))
}
darData.put(dar.descriptor.hash, (dar.bytes.clone(), dar.descriptor.name)).discard
val hash = dar.descriptor.hash
val pkgS = pkgs.view.map(readPackageId).toSet
darPackages.updateWith(hash)(optSet => Some(optSet.fold(pkgS)(_.union(pkgS)))).discard
FutureUnlessShutdown.unit
}
@ -169,5 +164,5 @@ class InMemoryDamlPackageStore(override protected val loggerFactory: NamedLogger
}
object InMemoryDamlPackageStore {
val defaultPackageDescription = String256M.tryCreate("default")
val defaultPackageDescription = String255.tryCreate("default")
}

View File

@ -9,14 +9,13 @@ import com.daml.error.ContextualizedErrorLogger
import com.daml.lf.archive.Decode
import com.daml.timer.FutureCheck.*
import com.digitalasset.canton.LfPackageId
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.config.{PackageMetadataViewConfig, ProcessingTimeout}
import com.digitalasset.canton.discard.Implicits.DiscardOps
import com.digitalasset.canton.ledger.error.{CommonErrors, PackageServiceErrors}
import com.digitalasset.canton.lifecycle.{FlagCloseable, FutureUnlessShutdown}
import com.digitalasset.canton.logging.{NamedLoggerFactory, NamedLogging}
import com.digitalasset.canton.participant.admin.PackageService
import com.digitalasset.canton.participant.store.DamlPackageStore
import com.digitalasset.canton.platform.indexer.PackageMetadataViewConfig
import com.digitalasset.canton.platform.store.packagemeta.PackageMetadata
import com.digitalasset.canton.platform.store.packagemeta.PackageMetadata.Implicits.packageMetadataSemigroup
import com.digitalasset.canton.time.Clock

View File

@ -37,7 +37,6 @@ import com.digitalasset.canton.ledger.error.groups.RequestValidationErrors
import com.digitalasset.canton.ledger.participant.state
import com.digitalasset.canton.ledger.participant.state.ReadService.ConnectedDomainResponse
import com.digitalasset.canton.ledger.participant.state.*
import com.digitalasset.canton.ledger.participant.state.index.PackageDetails
import com.digitalasset.canton.lifecycle.*
import com.digitalasset.canton.logging.{ErrorLoggingContext, NamedLoggerFactory, NamedLogging}
import com.digitalasset.canton.networking.grpc.CantonGrpcUtil.GrpcErrors
@ -697,20 +696,8 @@ class CantonSyncService(
override def listLfPackages()(implicit
traceContext: TraceContext
): Future[Map[PackageId, PackageDetails]] =
packageService.value
.listPackages()
.map(
_.view
.map { pkgDesc =>
pkgDesc.packageId -> PackageDetails(
size = pkgDesc.packageSize.toLong,
knownSince = pkgDesc.uploadedAt.underlying,
sourceDescription = Some(pkgDesc.sourceDescription.str),
)
}
.toMap
)
): Future[Seq[PackageDescription]] =
packageService.value.listPackages()
override def getPackageMetadataSnapshot(implicit
contextualizedErrorLogger: ContextualizedErrorLogger

View File

@ -16,8 +16,8 @@ import com.daml.lf.testing.parser.Implicits.SyntaxHelper
import com.daml.lf.testing.parser.ParserParameters
import com.digitalasset.canton.buildinfo.BuildInfo
import com.digitalasset.canton.concurrent.FutureSupervisor
import com.digitalasset.canton.config.CantonRequireTypes.{String255, String256M}
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.config.CantonRequireTypes.String255
import com.digitalasset.canton.config.{PackageMetadataViewConfig, ProcessingTimeout}
import com.digitalasset.canton.crypto.provider.symbolic.SymbolicPureCrypto
import com.digitalasset.canton.crypto.{Hash, HashAlgorithm, HashPurpose}
import com.digitalasset.canton.data.CantonTimestamp
@ -30,7 +30,6 @@ import com.digitalasset.canton.participant.metrics.ParticipantTestMetrics
import com.digitalasset.canton.participant.store.DamlPackageStore
import com.digitalasset.canton.participant.store.memory.InMemoryDamlPackageStore
import com.digitalasset.canton.participant.util.DAMLe
import com.digitalasset.canton.platform.indexer.PackageMetadataViewConfig
import com.digitalasset.canton.protocol.PackageDescription
import com.digitalasset.canton.time.SimClock
import com.digitalasset.canton.topology.DefaultTestIdentities
@ -106,7 +105,7 @@ class PackageServiceTest
test(env)
}
private lazy val cantonExamplesDescription = String256M.tryCreate("CantonExamples")
private lazy val cantonExamplesDescription = String255.tryCreate("CantonExamples")
private lazy val expectedPackageIdsAndState: Seq[PackageDescription] =
examplePackages
.map { pkg =>

View File

@ -5,7 +5,7 @@ package com.digitalasset.canton.participant.store
import cats.syntax.parallel.*
import com.digitalasset.canton.concurrent.ExecutionContextIdlenessExecutorService
import com.digitalasset.canton.config.CantonRequireTypes.{String255, String256M}
import com.digitalasset.canton.config.CantonRequireTypes.String255
import com.digitalasset.canton.crypto.{Hash, HashAlgorithm, HashPurpose, TestHash}
import com.digitalasset.canton.data.CantonTimestamp
import com.digitalasset.canton.participant.admin.PackageService.{Dar, DarDescriptor}
@ -45,9 +45,10 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
val darFile = new File(darPath)
val darData = Files.readAllBytes(darFile.toPath)
val hash = TestHash.digest("hash")
val dar = Dar(DarDescriptor(hash, darName), darData)
val testDescription = String256M.tryCreate("test")
val testDescription2 = String256M.tryCreate("other test description")
val testDescription = String255.tryCreate("test")
val testDescription2 = String255.tryCreate("other test description")
"save, retrieve, and remove a dar" in {
val store = mk()
@ -57,7 +58,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
List(damlPackage),
uploadedAt,
testDescription,
Some(Dar(DarDescriptor(hash, darName), darData)),
dar,
)
.failOnShutdown
result <- store.getDar(hash)
@ -66,7 +67,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
removed <- store.getDar(hash)
pkgStillExists <- store.getPackage(packageId)
} yield {
result shouldBe Some(Dar(DarDescriptor(hash, darName), darData))
result shouldBe Some(dar)
pkg shouldBe Some(damlPackage)
removed shouldBe None
pkgStillExists shouldBe Some(damlPackage)
@ -81,7 +82,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
List(damlPackage),
uploadedAt,
testDescription,
Some(Dar(DarDescriptor(hash, darName), darData)),
dar,
)
.failOnShutdown
result <- store.listDars()
@ -92,7 +93,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
val store = mk()
val dar = Dar(DarDescriptor(hash, darName), "dar contents".getBytes)
for {
_ <- store.append(List(damlPackage), uploadedAt, testDescription, Some(dar)).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
_ = ByteBuffer.wrap(dar.bytes).put("stuff".getBytes)
result <- store.getDar(hash)
} yield result shouldBe Some(Dar(DarDescriptor(hash, darName), "dar contents".getBytes))
@ -104,7 +105,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
for {
_ <- Future.sequence(
(0 until 4).map(_ =>
store.append(List(damlPackage), uploadedAt, testDescription, Some(dar)).failOnShutdown
store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
)
)
result <- store.getDar(hash)
@ -136,7 +137,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
pkgsDar1,
uploadedAt,
testDescription,
Some(Dar(DarDescriptor(hash, darName), darData)),
dar,
)
.failOnShutdown
@ -146,7 +147,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
pkgsDar2,
uploadedAt,
testDescription,
Some(Dar(DarDescriptor(hash2, darName2), darData)),
Dar(DarDescriptor(hash2, darName2), darData),
)
.failOnShutdown
@ -169,7 +170,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
pkg <- store.getPackage(readPackageId(remainingPackage))
// Sanity check that the resulting state is sensible
_ = dar shouldBe Some(Dar(DarDescriptor(hash, darName), darData))
_ = dar shouldBe dar
_ = pkg shouldBe Some(remainingPackage)
// Cleanup for the next iteration
@ -189,7 +190,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
"save and retrieve one Daml Package" in {
val store = mk()
for {
_ <- store.append(List(damlPackage), uploadedAt, testDescription, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
result <- store.getPackage(packageId)
} yield {
result shouldBe Some(damlPackage)
@ -200,7 +201,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
val store = mk()
for {
_ <- store
.append(List(damlPackage, damlPackage2), uploadedAt, testDescription, None)
.append(List(damlPackage, damlPackage2), uploadedAt, testDescription, dar)
.failOnShutdown
resPkg2 <- store.getPackage(packageId2)
resPkg1 <- store.getPackage(packageId)
@ -213,7 +214,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
"list package id and state of stored packages" in {
val store = mk()
for {
_ <- store.append(List(damlPackage), uploadedAt, testDescription, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
result <- store.listPackages()
} yield result should contain only PackageDescription(
packageId,
@ -228,7 +229,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
for {
_ <- Future.sequence(
(0 until 4).map(_ =>
store.append(List(damlPackage), uploadedAt, testDescription, None).failOnShutdown
store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
)
)
result <- store.getPackage(packageId)
@ -240,7 +241,7 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
"list package ids by state" in {
val store = mk()
for {
_ <- store.append(List(damlPackage), uploadedAt, testDescription, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
result <- store.listPackages()
} yield result.loneElement shouldBe PackageDescription(
packageId,
@ -253,11 +254,11 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
"list package id and state of stored packages where sourceDescription is empty" in {
val store = mk()
for {
_ <- store.append(List(damlPackage), uploadedAt, String256M.empty, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, String255.empty, dar).failOnShutdown
result <- store.listPackages()
} yield result should contain only PackageDescription(
packageId,
String256M.tryCreate("default"),
String255.tryCreate("default"),
uploadedAt,
damlPackageSize1,
)
@ -266,15 +267,15 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
"update a package description when (and only when) it is provided" in {
val store = mk()
for {
_ <- store.append(List(damlPackage), uploadedAt, testDescription, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
pkg1 <- store.getPackageDescription(packageId)
// Appending the same package with a new description should update it
_ <- store.append(List(damlPackage), uploadedAt, testDescription2, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, testDescription2, dar).failOnShutdown
pkg2 <- store.getPackageDescription(packageId)
// Appending the same package without providing a description should leave it unchanged
_ <- store.append(List(damlPackage), uploadedAt, String256M.empty, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, String255.empty, dar).failOnShutdown
pkg3 <- store.getPackageDescription(packageId)
// There are no duplicates
@ -300,10 +301,10 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
"list several packages with a limit" in {
val store = mk()
val test2Description = String256M.tryCreate("test2")
val test2Description = String255.tryCreate("test2")
for {
_ <- store.append(List(damlPackage), uploadedAt, testDescription, None).failOnShutdown
_ <- store.append(List(damlPackage2), uploadedAt, test2Description, None).failOnShutdown
_ <- store.append(List(damlPackage), uploadedAt, testDescription, dar).failOnShutdown
_ <- store.append(List(damlPackage2), uploadedAt, test2Description, dar).failOnShutdown
result1 <- store.listPackages(Some(1))
result2 <- store.listPackages(Some(2))
} yield {
@ -335,11 +336,9 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
fivePkgs,
uploadedAt,
testDescription,
dar = Some(
Dar(
DamlPackageStoreTest.descriptor,
DamlPackageStoreTest.descriptor.name.str.getBytes,
)
dar = Dar(
DamlPackageStoreTest.descriptor,
DamlPackageStoreTest.descriptor.name.str.getBytes,
),
)
.failOnShutdown
@ -349,11 +348,9 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
missing3,
uploadedAt,
testDescription,
dar = Some(
Dar(
DamlPackageStoreTest.descriptor2,
DamlPackageStoreTest.descriptor2.name.str.getBytes,
)
dar = Dar(
DamlPackageStoreTest.descriptor2,
DamlPackageStoreTest.descriptor2.name.str.getBytes,
),
)
.failOnShutdown
@ -381,11 +378,9 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
List(pkg3),
uploadedAt,
testDescription,
dar = Some(
Dar(
DamlPackageStoreTest.descriptor2,
DamlPackageStoreTest.descriptor2.name.str.getBytes,
)
dar = Dar(
DamlPackageStoreTest.descriptor2,
DamlPackageStoreTest.descriptor2.name.str.getBytes,
),
)
.failOnShutdown
@ -441,11 +436,9 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
fivePkgs,
uploadedAt,
testDescription,
dar = Some(
Dar(
DamlPackageStoreTest.descriptor,
DamlPackageStoreTest.descriptor.name.str.getBytes,
)
dar = Dar(
DamlPackageStoreTest.descriptor,
DamlPackageStoreTest.descriptor.name.str.getBytes,
),
)
.failOnShutdown
@ -454,11 +447,9 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
fivePkgs,
uploadedAt,
testDescription,
dar = Some(
Dar(
DamlPackageStoreTest.descriptor2,
DamlPackageStoreTest.descriptor2.name.str.getBytes,
)
dar = Dar(
DamlPackageStoreTest.descriptor2,
DamlPackageStoreTest.descriptor2.name.str.getBytes,
),
)
.failOnShutdown
@ -498,11 +489,9 @@ trait DamlPackageStoreTest extends AsyncWordSpec with BaseTest with HasExecution
missing2and3,
uploadedAt,
testDescription,
dar = Some(
Dar(
DamlPackageStoreTest.descriptor,
DamlPackageStoreTest.descriptor.name.str.getBytes,
)
dar = Dar(
DamlPackageStoreTest.descriptor,
DamlPackageStoreTest.descriptor.name.str.getBytes,
),
)
.failOnShutdown

View File

@ -1 +1 @@
20240605.13441.v30eeaac4
20240606.13448.v43783c88

View File

@ -16,10 +16,11 @@ import com.daml.ledger.api.v2.event.InterfaceView
import com.daml.ledger.api.v2.testing.time_service.TimeServiceGrpc.TimeServiceStub
import com.daml.ledger.api.v2.testing.time_service.{GetTimeRequest, SetTimeRequest, TimeServiceGrpc}
import com.daml.ledger.api.v2.transaction.TreeEvent
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.TransactionFilter
import com.daml.ledger.api.v2.transaction_filter.{
CumulativeFilter,
Filters,
InclusiveFilters,
InterfaceFilter,
TemplateFilter,
}
@ -71,9 +72,11 @@ class GrpcLedgerClient(val grpcClient: LedgerClient, val applicationId: Option[R
templateId: Identifier,
): TransactionFilter = {
val filters = Filters(
Some(
InclusiveFilters(templateFilters =
Seq(TemplateFilter(Some(toApiIdentifier(templateId)), includeCreatedEventBlob = true))
Seq(
CumulativeFilter(
IdentifierFilter.TemplateFilter(
TemplateFilter(Some(toApiIdentifier(templateId)), includeCreatedEventBlob = true)
)
)
)
)
@ -86,9 +89,11 @@ class GrpcLedgerClient(val grpcClient: LedgerClient, val applicationId: Option[R
): TransactionFilter = {
val filters =
Filters(
Some(
InclusiveFilters(
List(InterfaceFilter(Some(toApiIdentifier(interfaceId)), true))
Seq(
CumulativeFilter(
IdentifierFilter.InterfaceFilter(
InterfaceFilter(Some(toApiIdentifier(interfaceId)), true)
)
)
)
)

View File

@ -16,10 +16,11 @@ import com.daml.ledger.api.v2.commands._
import com.daml.ledger.api.v2.event.InterfaceView
import com.daml.ledger.api.v2.testing.time_service.TimeServiceGrpc.TimeServiceStub
import com.daml.ledger.api.v2.testing.time_service.{GetTimeRequest, SetTimeRequest, TimeServiceGrpc}
import com.daml.ledger.api.v2.transaction_filter.CumulativeFilter.IdentifierFilter
import com.daml.ledger.api.v2.transaction_filter.TransactionFilter
import com.daml.ledger.api.v2.transaction_filter.{
CumulativeFilter,
Filters,
InclusiveFilters,
InterfaceFilter,
TemplateFilter,
}
@ -95,9 +96,9 @@ class GrpcLedgerClient(
templateId: Identifier,
): TransactionFilter = {
val filters = Filters(
Some(
InclusiveFilters(templateFilters =
Seq(
Seq(
CumulativeFilter(
IdentifierFilter.TemplateFilter(
TemplateFilter(
Some(toApiIdentifierUpgrades(templateId, false)),
includeCreatedEventBlob = true,
@ -115,9 +116,11 @@ class GrpcLedgerClient(
): TransactionFilter = {
val filters =
Filters(
Some(
InclusiveFilters(
List(InterfaceFilter(Some(toApiIdentifier(interfaceId)), true))
Seq(
CumulativeFilter(
IdentifierFilter.InterfaceFilter(
InterfaceFilter(Some(toApiIdentifier(interfaceId)), true)
)
)
)
)

View File

@ -24,6 +24,7 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.flatspec.AnyFlatSpec
import scala.jdk.CollectionConverters._
import scala.jdk.OptionConverters._
final class UpdateClientImplTest
extends AnyFlatSpec
@ -96,12 +97,13 @@ final class UpdateClientImplTest
val transactionFilter = new FiltersByParty(
Map[String, data.Filter](
"Alice" -> new data.InclusiveFilter(
"Alice" -> new data.CumulativeFilter(
Map.empty.asJava,
Map(
new data.Identifier("p1", "m1", "e1") -> data.Filter.Template.HIDE_CREATED_EVENT_BLOB,
new data.Identifier("p2", "m2", "e2") -> data.Filter.Template.HIDE_CREATED_EVENT_BLOB,
).asJava,
None.toJava,
)
).asJava
)
@ -114,9 +116,11 @@ final class UpdateClientImplTest
val request = transactionService.lastUpdatesRequest.get()
request.beginExclusive shouldBe Some(ParticipantOffset(Absolute("1")))
request.endInclusive shouldBe Some(ParticipantOffset(Absolute("2")))
val filter = request.filter.get.filtersByParty
filter.keySet shouldBe Set("Alice")
filter("Alice").inclusive.get.templateFilters.toSet shouldBe Set(
val filterByParty = request.filter.get.filtersByParty
filterByParty.keySet shouldBe Set("Alice")
filterByParty("Alice").cumulative
.flatMap(_.identifierFilter.templateFilter)
.toSet shouldBe Set(
TemplateFilter(
Some(Identifier("p1", moduleName = "m1", entityName = "e1")),
includeCreatedEventBlob = false,
@ -153,12 +157,13 @@ final class UpdateClientImplTest
val transactionFilter = new FiltersByParty(
Map[String, data.Filter](
"Alice" -> new data.InclusiveFilter(
"Alice" -> new data.CumulativeFilter(
Map.empty.asJava,
Map(
new data.Identifier("p1", "m1", "e1") -> data.Filter.Template.HIDE_CREATED_EVENT_BLOB,
new data.Identifier("p2", "m2", "e2") -> data.Filter.Template.HIDE_CREATED_EVENT_BLOB,
).asJava,
None.toJava,
)
).asJava
)
@ -171,9 +176,11 @@ final class UpdateClientImplTest
val request = transactionService.lastUpdatesTreesRequest.get()
request.beginExclusive shouldBe Some(ParticipantOffset(Absolute("1")))
request.endInclusive shouldBe Some(ParticipantOffset(Absolute("2")))
val filter = request.filter.get.filtersByParty
filter.keySet shouldBe Set("Alice")
filter("Alice").inclusive.get.templateFilters.toSet shouldBe Set(
val filterByParty = request.filter.get.filtersByParty
filterByParty.keySet shouldBe Set("Alice")
filterByParty("Alice").cumulative
.flatMap(_.identifierFilter.templateFilter)
.toSet shouldBe Set(
TemplateFilter(
Some(Identifier("p1", moduleName = "m1", entityName = "e1")),
includeCreatedEventBlob = false,

View File

@ -10,6 +10,7 @@ import com.daml.ledger.api.v2.state_service.GetActiveContractsResponse.ContractE
import com.google.protobuf.timestamp.Timestamp
import scala.jdk.CollectionConverters._
import scala.jdk.OptionConverters._
trait DataLayerHelpers {
@ -51,6 +52,12 @@ trait DataLayerHelpers {
def filterFor(party: String): FiltersByParty =
new FiltersByParty(
Map(party -> (new InclusiveFilter(Map.empty.asJava, Map.empty.asJava): Filter)).asJava
Map(
party -> (new CumulativeFilter(
Map.empty.asJava,
Map.empty.asJava,
Some(Filter.Wildcard.HIDE_CREATED_EVENT_BLOB).toJava,
): Filter)
).asJava
)
}

View File

@ -612,9 +612,9 @@ test("exercise using explicit disclosure", async () => {
filter: {
filtersByParty: {
[ALICE_PARTY]: {
inclusive: {
templateFilters: [
{
cumulative: [
{
templateFilter: {
templateId: {
packageId: buildAndLint.packageId,
moduleName: "Main",
@ -622,8 +622,8 @@ test("exercise using explicit disclosure", async () => {
},
includeCreatedEventBlob: true,
},
],
},
},
],
},
},
},