Multi participant node named logging (#2647) (#3080)

* Upgrade scala-logging and slf4j

* Named logging for multi-participant tests (#2647)
This commit is contained in:
Oliver Seeliger 2019-10-03 18:20:57 +02:00 committed by GitHub
parent 0c72eeaf32
commit c9658c7813
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
50 changed files with 492 additions and 274 deletions

View File

@ -1 +1 @@
04d71c97878c9b6768b94fe45457c206b4034bf3 dependencies.yaml
a990d4f4b23566bd7aa98f605265ddf739e959a5 dependencies.yaml

View File

@ -170,7 +170,7 @@ def list_dependencies():
{"artifact": "com.google.j2objc:j2objc-annotations:1.1", "lang": "java", "sha1": "ed28ded51a8b1c6b112568def5f4b455e6809019", "sha256": "2994a7eb78f2710bd3d3bfb639b2c94e219cedac0d4d084d516e78c16dddecf6", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar", "source": {"sha1": "1efdf5b737b02f9b72ebdec4f72c37ec411302ff", "sha256": "2cd9022a77151d0b574887635cdfcdf3b78155b602abc89d7f8e62aba55cfb4f", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1-sources.jar"} , "name": "com_google_j2objc_j2objc_annotations", "actual": "@com_google_j2objc_j2objc_annotations//jar", "bind": "jar/com/google/j2objc/j2objc_annotations"},
{"artifact": "com.google.protobuf:protobuf-java-util:3.7.1", "lang": "java", "sha1": "45dc95896cfad26397675fdabef7b032d6db4bb6", "sha256": "818608098c3b959482a6ce785e016192c25719c1c1c44971bad68180d3df3e9f", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/google/protobuf/protobuf-java-util/3.7.1/protobuf-java-util-3.7.1.jar", "source": {"sha1": "2b1e75841c2259d6d6bf2818c20f4ec2d46289bb", "sha256": "7407fca0162b7bb22e02af5578fe9517f9e0d5e011d10add83485fda89f6b43b", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/google/protobuf/protobuf-java-util/3.7.1/protobuf-java-util-3.7.1-sources.jar"} , "name": "com_google_protobuf_protobuf_java_util", "actual": "@com_google_protobuf_protobuf_java_util//jar", "bind": "jar/com/google/protobuf/protobuf_java_util"},
{"artifact": "com.google.protobuf:protobuf-java:3.8.0", "lang": "java", "sha1": "b5f93103d113540bb848fe9ce4e6819b1f39ee49", "sha256": "94ba90a869ddad07eb49afaa8f39e676c2554b5b1c417ad9e1188257e79be60f", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/google/protobuf/protobuf-java/3.8.0/protobuf-java-3.8.0.jar", "source": {"sha1": "df71e14a1af8ba1ff508528f9057b535ac8b66b2", "sha256": "1982b2a96f752cf0d64c3066b5e9e4016f9cc825591f3d1a9c40a017203fcb2f", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/google/protobuf/protobuf-java/3.8.0/protobuf-java-3.8.0-sources.jar"} , "name": "com_google_protobuf_protobuf_java", "actual": "@com_google_protobuf_protobuf_java//jar", "bind": "jar/com/google/protobuf/protobuf_java"},
{"artifact": "com.h2database:h2:1.4.198", "lang": "java", "sha1": "c5693092efef44cc4587828afe12f7f7208a67f4", "sha256": "32dd6b149cb722aa4c2dd4d40a74a9cd41e32ac59a4e755a66e5753660d61d46", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/h2database/h2/1.4.198/h2-1.4.198.jar", "source": {"sha1": "0f0f3e411cc9633b7e34b3cd6cd217a96c31e89b", "sha256": "0bbe35cd781cddbcdfc01f5010f5a273accf5d5f3db0b5958945c8b32cfc08f6", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/h2database/h2/1.4.198/h2-1.4.198-sources.jar"} , "name": "com_h2database_h2", "actual": "@com_h2database_h2//jar", "bind": "jar/com/h2database/h2"},
{"artifact": "com.h2database:h2:1.4.199", "lang": "java", "sha1": "7bf08152984ed8859740ae3f97fae6c72771ae45", "sha256": "3125a16743bc6b4cfbb61abba783203f1fb68230aa0fdc97898f796f99a5d42e", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/h2database/h2/1.4.199/h2-1.4.199.jar", "source": {"sha1": "cda9ec0b80ed4eefc4d1cd37781548725a59a738", "sha256": "88b730bd05e6be6186e814e04aeafc08b119ce0c394cea9e7ed3cec5e7330ac5", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/h2database/h2/1.4.199/h2-1.4.199-sources.jar"} , "name": "com_h2database_h2", "actual": "@com_h2database_h2//jar", "bind": "jar/com/h2database/h2"},
{"artifact": "com.jcraft:jsch:0.1.54", "lang": "java", "sha1": "da3584329a263616e277e15462b387addd1b208d", "sha256": "92eb273a3316762478fdd4fe03a0ce1842c56f496c9c12fe1235db80450e1fdb", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar", "source": {"sha1": "91d6069df9be9e076bdb124e82fc2a9af9547616", "sha256": "49d021dd58f6b455046a07331a68a5e647df354d7f6961b73df298203c43f44a", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/jcraft/jsch/0.1.54/jsch-0.1.54-sources.jar"} , "name": "com_jcraft_jsch", "actual": "@com_jcraft_jsch//jar", "bind": "jar/com/jcraft/jsch"},
{"artifact": "com.jsuereth:scala-arm_2.12:2.0", "lang": "scala", "sha1": "5052bf4ecec97cf14437b24b18f18490c02cbb2e", "sha256": "da57db2ca312fed028689764de75dc28dbabe76e211af4cda86b567593713f10", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/jsuereth/scala-arm_2.12/2.0/scala-arm_2.12-2.0.jar", "source": {"sha1": "f223604e0e61f96e7b615e5a53ca57ce7fbb9093", "sha256": "2f41c0e6ac265026c0630ab413b40848da7c30c8f2deb54b75393215e2219d27", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/jsuereth/scala-arm_2.12/2.0/scala-arm_2.12-2.0-sources.jar"} , "name": "com_jsuereth_scala_arm_2_12", "actual": "@com_jsuereth_scala_arm_2_12//jar:file", "bind": "jar/com/jsuereth/scala_arm_2_12"},
{"artifact": "com.kohlschutter.junixsocket:junixsocket-common:2.0.4", "lang": "java", "sha1": "b4d1870bf903412533e0b79c6fcd402defcfc05b", "sha256": "afc37615ecf7fadff74d29afb443fe4f633d396646d89d825e8224a277839f60", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/kohlschutter/junixsocket/junixsocket-common/2.0.4/junixsocket-common-2.0.4.jar", "source": {"sha1": "93f60af6bcdca6eee8d3ed5914bcfabb9f2618f0", "sha256": "77a47cc4685c93df44b00679fbf9bcf1ca0c8ff642125694f2e02ab93081d23c", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/kohlschutter/junixsocket/junixsocket-common/2.0.4/junixsocket-common-2.0.4-sources.jar"} , "name": "com_kohlschutter_junixsocket_junixsocket_common", "actual": "@com_kohlschutter_junixsocket_junixsocket_common//jar", "bind": "jar/com/kohlschutter/junixsocket/junixsocket_common"},
@ -234,7 +234,7 @@ def list_dependencies():
{"artifact": "com.typesafe.play:anorm-akka_2.12:2.5.3", "lang": "scala", "sha1": "8ef34ae07512c651b548648280445702dda63339", "sha256": "cd89fe5aa5bc20a64476082110cb3d4ea9ffb51001941e21982e580412c53abc", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/play/anorm-akka_2.12/2.5.3/anorm-akka_2.12-2.5.3.jar", "source": {"sha1": "33f8d37a4ad532ecc8e79f6aa2bcb2907a899b99", "sha256": "1c000f57ce1bf6131288610818c56adf24fbe372fdf444c04b1e636d01e73b3e", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/play/anorm-akka_2.12/2.5.3/anorm-akka_2.12-2.5.3-sources.jar"} , "name": "com_typesafe_play_anorm_akka_2_12", "actual": "@com_typesafe_play_anorm_akka_2_12//jar:file", "bind": "jar/com/typesafe/play/anorm_akka_2_12"},
{"artifact": "com.typesafe.play:anorm-tokenizer_2.12:2.5.3", "lang": "scala", "sha1": "ba87fa45f82192ffbd57a1b7018b2eed5cc79542", "sha256": "74e556aa6fd60e887aeee2e52e017c6fe9a630cad43e0d0793689c2aaa70fb59", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/play/anorm-tokenizer_2.12/2.5.3/anorm-tokenizer_2.12-2.5.3.jar", "source": {"sha1": "9ae09fd793d0d6fdf4e3e4783dfc5d69aaadf811", "sha256": "6b502c17f3cc208b28772a42c7c16776cf97adbcca6a8d328b137bcc6e4e50fd", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/play/anorm-tokenizer_2.12/2.5.3/anorm-tokenizer_2.12-2.5.3-sources.jar"} , "name": "com_typesafe_play_anorm_tokenizer_2_12", "actual": "@com_typesafe_play_anorm_tokenizer_2_12//jar:file", "bind": "jar/com/typesafe/play/anorm_tokenizer_2_12"},
{"artifact": "com.typesafe.play:anorm_2.12:2.5.3", "lang": "scala", "sha1": "baf1eb3488b5d4638169dbdfd084103df15eb7f8", "sha256": "684f456d7c28590669ef012e49cfec032bbb8ebbf42e2c675112a6b748c51809", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/play/anorm_2.12/2.5.3/anorm_2.12-2.5.3.jar", "source": {"sha1": "6e75d042d2c61068f3e57925105330da8f6fe1bb", "sha256": "69622d61deff12562cb6a97ea8db57baf4d98a01a6c490d5fbab773fcd5e3e8b", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/play/anorm_2.12/2.5.3/anorm_2.12-2.5.3-sources.jar"} , "name": "com_typesafe_play_anorm_2_12", "actual": "@com_typesafe_play_anorm_2_12//jar:file", "bind": "jar/com/typesafe/play/anorm_2_12"},
{"artifact": "com.typesafe.scala-logging:scala-logging_2.12:3.5.0", "lang": "scala", "sha1": "7c3c54941abfc346e412e8a3065b5da3668a4843", "sha256": "b99ef12395309df24d1156cda0d356074bbef7db968c2ce9b2477ac60aae5a2c", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/scala-logging/scala-logging_2.12/3.5.0/scala-logging_2.12-3.5.0.jar", "source": {"sha1": "3ec3a31e0a2cda94c29f7070ff83bc5416c051d1", "sha256": "66c30a1bec8977123c4bef994ba69b586dad58cf1fda94f5632336b4d3704a0a", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/scala-logging/scala-logging_2.12/3.5.0/scala-logging_2.12-3.5.0-sources.jar"} , "name": "com_typesafe_scala_logging_scala_logging_2_12", "actual": "@com_typesafe_scala_logging_scala_logging_2_12//jar:file", "bind": "jar/com/typesafe/scala_logging/scala_logging_2_12"},
{"artifact": "com.typesafe.scala-logging:scala-logging_2.12:3.9.2", "lang": "scala", "sha1": "b1f19bc6774e01debf09bf5f564ad3613687bf49", "sha256": "eb4e31b7785d305b5baf0abd23a64b160e11b8cbe2503a765aa4b01247127dad", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2.jar", "source": {"sha1": "811d21a0165b722a8f18d46b198f8f958105815c", "sha256": "66684d657691bfee01f6a62ac6909a6366b074521645f0bbacb1221e916a8d5f", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/scala-logging/scala-logging_2.12/3.9.2/scala-logging_2.12-3.9.2-sources.jar"} , "name": "com_typesafe_scala_logging_scala_logging_2_12", "actual": "@com_typesafe_scala_logging_scala_logging_2_12//jar:file", "bind": "jar/com/typesafe/scala_logging/scala_logging_2_12"},
{"artifact": "com.typesafe.slick:slick-hikaricp_2.12:3.3.0", "lang": "scala", "sha1": "6d8460a8eec43ac02c19ed9874b0fff2ad5d685d", "sha256": "024547cefb74278fb00ee1681565222c06e835c2dc574d1497a020f8e1ba2a45", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/slick/slick-hikaricp_2.12/3.3.0/slick-hikaricp_2.12-3.3.0.jar", "source": {"sha1": "c8cc33aa9e0be5df03de919cb9f6211fe3b27235", "sha256": "6bf4883abeb5dfbd12ffc715dc5a3cf62191f2efa25fbed5712e51e196ca68ba", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/slick/slick-hikaricp_2.12/3.3.0/slick-hikaricp_2.12-3.3.0-sources.jar"} , "name": "com_typesafe_slick_slick_hikaricp_2_12", "actual": "@com_typesafe_slick_slick_hikaricp_2_12//jar:file", "bind": "jar/com/typesafe/slick/slick_hikaricp_2_12"},
{"artifact": "com.typesafe.slick:slick_2.12:3.3.0", "lang": "scala", "sha1": "93b7cbc1c090dfc53a3d09d118f756c0f323a8d6", "sha256": "5c4dee7032eb3d3a9ba0f5fbbeb71658f1b601404c9adc7abf98806f1d3786c5", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/slick/slick_2.12/3.3.0/slick_2.12-3.3.0.jar", "source": {"sha1": "b36d35defd61fdda71f2a240cfb5fcb99345b9f1", "sha256": "635ce75720e6679672c66624c31bd32225709e275a81f1a541910978686f49af", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/com/typesafe/slick/slick_2.12/3.3.0/slick_2.12-3.3.0-sources.jar"} , "name": "com_typesafe_slick_slick_2_12", "actual": "@com_typesafe_slick_slick_2_12//jar:file", "bind": "jar/com/typesafe/slick/slick_2_12"},
# duplicates in com.typesafe:config promoted to 1.3.3
@ -593,14 +593,14 @@ def list_dependencies():
{"artifact": "org.seleniumhq.selenium:selenium-support:3.12.0", "lang": "java", "sha1": "3244e6c89d8708587ae5785723be1330b1e72299", "sha256": "b3c230f5c805fcace7800aad55756f5757d081f2172c8cd438f8516303d29972", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/seleniumhq/selenium/selenium-support/3.12.0/selenium-support-3.12.0.jar", "source": {"sha1": "3da2845356347e83a2e698308dc89f05dd00dcca", "sha256": "984882712d2e031d9866b83c0cedf81c53db8570fece53c5d42d0e7c3a56873a", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/seleniumhq/selenium/selenium-support/3.12.0/selenium-support-3.12.0-sources.jar"} , "name": "org_seleniumhq_selenium_selenium_support", "actual": "@org_seleniumhq_selenium_selenium_support//jar", "bind": "jar/org/seleniumhq/selenium/selenium_support"},
{"artifact": "org.skyscreamer:jsonassert:1.1.1", "lang": "java", "sha1": "e9faeb6ab98837bb79a1e54a673102550a48e81d", "sha256": "e17dfdb37e4c2f1d768bb18654bbcce26294b802b3a4af74949500cf401a8da3", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/skyscreamer/jsonassert/1.1.1/jsonassert-1.1.1.jar", "source": {"sha1": "7e8b1cd999475e3684441ef89d1baee0bbcd8d69", "sha256": "40d6822052258d6e32ee796c6d36cdb05efdfcb4b3a8650880d99372d6e60a3f", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/skyscreamer/jsonassert/1.1.1/jsonassert-1.1.1-sources.jar"} , "name": "org_skyscreamer_jsonassert", "actual": "@org_skyscreamer_jsonassert//jar", "bind": "jar/org/skyscreamer/jsonassert"},
{"artifact": "org.slf4j:jcl-over-slf4j:1.7.21", "lang": "java", "sha1": "331b564a3a42f002a0004b039c1c430da89062cd", "sha256": "686b9dab357b7b665b969bbbf3dcdc67edd88ee9500699e893b5e70927be5e3f", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/jcl-over-slf4j/1.7.21/jcl-over-slf4j-1.7.21.jar", "source": {"sha1": "c565ca9c1239b38fe4a70e4101086c4e880f6358", "sha256": "5f9edefe7db1dfd99eb341f253b721e515f82dd3b528aa48630fccd6bb39e3a6", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/jcl-over-slf4j/1.7.21/jcl-over-slf4j-1.7.21-sources.jar"} , "name": "org_slf4j_jcl_over_slf4j", "actual": "@org_slf4j_jcl_over_slf4j//jar", "bind": "jar/org/slf4j/jcl_over_slf4j"},
# duplicates in org.slf4j:slf4j-api fixed to 1.7.25
# duplicates in org.slf4j:slf4j-api fixed to 1.7.26
# - ch.qos.logback:logback-classic:1.2.3 wanted version 1.7.25
# - com.eed3si9n:gigahorse-core_2.12:0.3.0 wanted version 1.7.25
# - com.github.cb372:scalacache-caffeine_2.12:0.20.0 wanted version 1.7.25
# - com.github.cb372:scalacache-core_2.12:0.20.0 wanted version 1.7.25
# - com.sparkjava:spark-core:2.7.2 wanted version 1.7.13
# - com.typesafe.akka:akka-slf4j_2.12:2.5.23 wanted version 1.7.25
# - com.typesafe.scala-logging:scala-logging_2.12:3.5.0 wanted version 1.7.21
# - com.typesafe.scala-logging:scala-logging_2.12:3.9.2 wanted version 1.7.25
# - com.typesafe.slick:slick_2.12:3.3.0 wanted version 1.7.25
# - com.zaxxer:HikariCP:3.2.0 wanted version 1.7.25
# - io.dropwizard.metrics:metrics-core:4.0.0 wanted version 1.7.25
@ -610,12 +610,12 @@ def list_dependencies():
# - org.rnorth:tcp-unix-socket-proxy:1.0.1 wanted version 1.7.21
# - org.slf4j:jcl-over-slf4j:1.7.21 wanted version 1.7.21
# - org.slf4j:slf4j-ext:1.7.25 wanted version 1.7.25
# - org.slf4j:slf4j-simple:1.7.25 wanted version 1.7.25
# - org.slf4j:slf4j-simple:1.7.26 wanted version 1.7.26
# - org.testcontainers:testcontainers:1.4.2 wanted version 1.7.25
# - org.zeroturnaround:zt-exec:1.8 wanted version 1.7.2
{"artifact": "org.slf4j:slf4j-api:1.7.25", "lang": "java", "sha1": "da76ca59f6a57ee3102f8f9bd9cee742973efa8a", "sha256": "18c4a0095d5c1da6b817592e767bb23d29dd2f560ad74df75ff3961dbde25b79", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar", "source": {"sha1": "962153db4a9ea71b79d047dfd1b2a0d80d8f4739", "sha256": "c4bc93180a4f0aceec3b057a2514abe04a79f06c174bbed910a2afb227b79366", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25-sources.jar"} , "name": "org_slf4j_slf4j_api", "actual": "@org_slf4j_slf4j_api//jar", "bind": "jar/org/slf4j/slf4j_api"},
{"artifact": "org.slf4j:slf4j-api:1.7.26", "lang": "java", "sha1": "77100a62c2e6f04b53977b9f541044d7d722693d", "sha256": "6d9e5b86cfd1dd44c676899285b5bb4fa0d371cf583e8164f9c8a0366553242b", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-api/1.7.26/slf4j-api-1.7.26.jar", "source": {"sha1": "021c84cdf9da108216b5e402611d4af479b60cb8", "sha256": "9e25ad98a324e6685752fd01fbbd0588ceec5df564e53c49486946a2d19dc482", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-api/1.7.26/slf4j-api-1.7.26-sources.jar"} , "name": "org_slf4j_slf4j_api", "actual": "@org_slf4j_slf4j_api//jar", "bind": "jar/org/slf4j/slf4j_api"},
{"artifact": "org.slf4j:slf4j-ext:1.7.25", "lang": "java", "sha1": "d1685f86e2becda32ad8bb823b8cc4990eb45f69", "sha256": "9df4ea4d390eb559f153716fa77658c26540f4f83635973b8c0e0da70d6fa944", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-ext/1.7.25/slf4j-ext-1.7.25.jar", "source": {"sha1": "da5cead7cf1463cabec6cd56a9b8936037711e0c", "sha256": "784e1fe8ad73ccb3a2df7eb76c4f55704f91986aeda394c0b9a77d3fe5e17ae6", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-ext/1.7.25/slf4j-ext-1.7.25-sources.jar"} , "name": "org_slf4j_slf4j_ext", "actual": "@org_slf4j_slf4j_ext//jar", "bind": "jar/org/slf4j/slf4j_ext"},
{"artifact": "org.slf4j:slf4j-simple:1.7.25", "lang": "java", "sha1": "8dacf9514f0c707cbbcdd6fd699e8940d42fb54e", "sha256": "0966e86fffa5be52d3d9e7b89dd674d98a03eed0a454fbaf7c1bd9493bd9d874", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-simple/1.7.25/slf4j-simple-1.7.25.jar", "source": {"sha1": "af3cd3ad1ea4b08b27b54f12529b4bf343bc5ca1", "sha256": "2cfa254e77c6f41bdcd8500c61c0f6b9959de66835d2b598102d38c2a807f367", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-simple/1.7.25/slf4j-simple-1.7.25-sources.jar"} , "name": "org_slf4j_slf4j_simple", "actual": "@org_slf4j_slf4j_simple//jar", "bind": "jar/org/slf4j/slf4j_simple"},
{"artifact": "org.slf4j:slf4j-simple:1.7.26", "lang": "java", "sha1": "dfb0de47f433c2a37dd44449c88d84b698cd5cf7", "sha256": "4b8ed75e2273850bf4eeb411ae5de5e0c0a44da59a96ca68d284749a6a373678", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-simple/1.7.26/slf4j-simple-1.7.26.jar", "source": {"sha1": "e4804203f5f65c4762798b172418e0812d29b8c3", "sha256": "342b07260d82e5e79e0338daf15cb960b515c6831025d9b5303ca04f0e5a47f5", "repository": "http://central.maven.org/maven2/", "url": "http://central.maven.org/maven2/org/slf4j/slf4j-simple/1.7.26/slf4j-simple-1.7.26-sources.jar"} , "name": "org_slf4j_slf4j_simple", "actual": "@org_slf4j_slf4j_simple//jar", "bind": "jar/org/slf4j/slf4j_simple"},
# duplicates in org.spire-math:jawn-parser_2.12 promoted to 0.13.0
# - com.eed3si9n:sjson-new-scalajson_2.12:0.8.2 wanted version 0.10.4
# - io.circe:circe-jawn_2.12:0.10.0 wanted version 0.13.0

View File

@ -54,7 +54,7 @@ dependencies:
com.typesafe.scala-logging:
scala-logging:
lang: scala
version: "3.5.0"
version: "3.9.2"
org.scalaz:
scalaz-core:
@ -94,10 +94,10 @@ dependencies:
org.slf4j:
slf4j-api:
lang: java
version: "1.7.25"
version: "1.7.26"
slf4j-simple:
lang: java
version: "1.7.25"
version: "1.7.26"
commons-io:
commons-io:

View File

@ -97,7 +97,7 @@ class DomainTransactionMapper(decoder: DecoderType) extends LazyLogging {
private def logAndDiscard(event: CreatedEvent)(
err: EventDecoderError): Either[InputValidationError, Option[DomainEvent]] = {
// TODO: improve error handling (make discarding error log message configurable)
logger.warn(s"Unhandled create event $event. Error: $err")
logger.warn(s"Unhandled create event ${event.toString}. Error: ${err.toString}")
Right(None)
}

View File

@ -40,9 +40,9 @@ object Main extends StrictLogging {
LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) match {
case a: ch.qos.logback.classic.Logger =>
a.setLevel(verbosity)
logger.info(s"${codegenId: String} verbosity: $verbosity")
logger.info(s"${codegenId: String} verbosity: ${verbosity.toString}")
case _ =>
logger.warn(s"${codegenId: String} cannot set requested verbosity: $verbosity")
logger.warn(s"${codegenId: String} cannot set requested verbosity: ${verbosity.toString}")
}
}

View File

@ -36,7 +36,7 @@ object DamlContractTemplateGen {
val syntaxIdDecl = LFUtil.toCovariantTypeDef(" ExOn")
val syntaxIdType = TypeName(" ExOn")
logger.debug(s"generate templateDecl: $templateName, $templateInterface")
logger.debug(s"generate templateDecl: ${templateName.toString}, ${templateInterface.toString}")
val templateChoiceMethods = templateInterface.template.choices.flatMap {
case (id, interface) =>

View File

@ -102,20 +102,20 @@ object IouMain extends App with StrictLogging {
"USD",
BigDecimal("99999.00"))
_ <- clientUtil.submitCommand(issuer, issuerWorkflowId, createCmd)
_ = logger.info(s"$issuer sent create command: $createCmd")
_ = logger.info(s"$issuer sent create command: ${createCmd.toString}")
tx0 <- clientUtil.nextTransaction(issuer, offset0)(amat)
_ = logger.info(s"$issuer received transaction: $tx0")
_ = logger.info(s"$issuer received transaction: ${tx0.toString}")
createdEvent <- toFuture(decodeCreatedEvent(tx0))
_ = logger.info(s"$issuer received created event: $createdEvent")
_ = logger.info(s"$issuer received created event: ${createdEvent.toString}")
exerciseCmd = IouCommands.iouTransferExerciseCommand(
iouTemplateId,
createdEvent.contractId,
newOwner)
_ <- clientUtil.submitCommand(issuer, issuerWorkflowId, exerciseCmd)
_ = logger.info(s"$issuer sent exercise command: $exerciseCmd")
_ = logger.info(s"$issuer sent exercise command: ${exerciseCmd.toString}")
} yield ()

View File

@ -18,6 +18,7 @@ da_scala_library(
deps = [
"//3rdparty/jvm/com/google/protobuf:protobuf_java",
"//3rdparty/jvm/com/typesafe/akka:akka_stream",
"//3rdparty/jvm/com/typesafe/scala_logging",
"//3rdparty/jvm/io/grpc:grpc_netty",
"//3rdparty/jvm/io/grpc:grpc_services",
"//3rdparty/jvm/io/netty:netty_handler",

View File

@ -12,6 +12,7 @@ import com.daml.ledger.participant.state.kvutils.InMemoryKVParticipantState
import com.daml.ledger.participant.state.v1.ParticipantId
import com.digitalasset.daml.lf.archive.DarReader
import com.digitalasset.daml_lf.DamlLf.Archive
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.index.{StandaloneIndexServer, StandaloneIndexerServer}
import org.slf4j.LoggerFactory
@ -54,8 +55,10 @@ object ReferenceServer extends App {
} yield ledger.uploadPackages(dar.all, None)
}
val indexerServer = StandaloneIndexerServer(readService, config)
val indexServer = StandaloneIndexServer(config, readService, writeService).start()
val participantLoggerFactory = NamedLoggerFactory.forParticipant(participantId)
val indexerServer = StandaloneIndexerServer(readService, config, participantLoggerFactory)
val indexServer =
StandaloneIndexServer(config, readService, writeService, participantLoggerFactory).start()
val extraParticipants =
for {
@ -66,11 +69,15 @@ object ReferenceServer extends App {
participantId = participantId,
jdbcUrl = jdbcUrl
)
val extraIndexer = StandaloneIndexerServer(readService, participantConfig)
val participantLoggerFactory =
NamedLoggerFactory.forParticipant(participantConfig.participantId)
val extraIndexer =
StandaloneIndexerServer(readService, participantConfig, participantLoggerFactory)
val extraLedgerApiServer = StandaloneIndexServer(
participantConfig,
readService,
writeService
writeService,
participantLoggerFactory
)
(extraIndexer, extraLedgerApiServer.start())
}

View File

@ -15,6 +15,7 @@ import com.digitalasset.grpc.adapter.{AkkaExecutionSequencerPool, ExecutionSeque
import com.digitalasset.ledger.api.domain
import com.digitalasset.ledger.api.v1.command_completion_service.CompletionEndRequest
import com.digitalasset.ledger.client.services.commands.CommandSubmissionFlow
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.server.services.command.ApiCommandService
import com.digitalasset.platform.server.services.identity.ApiLedgerIdentityService
import io.grpc.BindableService
@ -24,13 +25,11 @@ import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.handler.ssl.SslContext
import io.netty.util.concurrent.DefaultThreadFactory
import org.slf4j.LoggerFactory
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._
object Server {
private val logger = LoggerFactory.getLogger(this.getClass)
def apply(
serverPort: Int,
@ -48,15 +47,22 @@ object Server {
Runtime.getRuntime.availableProcessors() * 8
)(materializer.system)
// Named logger includes participant identifier in every log message which helps debug multi-node scenarios
val loggerFactory = NamedLoggerFactory.forParticipant(s"ParticipantWithPort$serverPort")
new Server(
serverEsf,
serverPort,
sslContext,
createServices(indexService, writeService),
)
createServices(indexService, writeService, loggerFactory),
loggerFactory
),
}
private def createServices(indexService: IndexService, writeService: WriteService)(
private def createServices(
indexService: IndexService,
writeService: WriteService,
loggerFactory: NamedLoggerFactory)(
implicit mat: ActorMaterializer,
serverEsf: ExecutionSequencerFactory
): List[BindableService] = {
@ -68,7 +74,7 @@ object Server {
10.seconds
)
val engine = Engine()
logger.info(EngineInfo.show)
loggerFactory.getLogger(this.getClass).info(EngineInfo.show)
val submissionService =
DamlOnXSubmissionService.create(ledgerId, indexService, writeService, engine)
@ -83,7 +89,9 @@ object Server {
DamlOnXTransactionService.create(ledgerId, indexService)
val identityService =
ApiLedgerIdentityService.create(() => Future.successful(domain.LedgerId(ledgerId)))
ApiLedgerIdentityService.create(
() => Future.successful(domain.LedgerId(ledgerId)),
loggerFactory)
// FIXME(JM): hard-coded values copied from SandboxConfig.
val commandService = ApiCommandService.create(
@ -109,7 +117,8 @@ object Server {
() => commandCompletionService.completionEnd(CompletionEndRequest(ledgerId)),
transactionService.getTransactionById,
transactionService.getFlatTransactionById
)
),
loggerFactory
)
val packageService = DamlOnXPackageService(indexService, ledgerId)
@ -145,9 +154,10 @@ final class Server private (
serverEsf: AkkaExecutionSequencerPool,
serverPort: Int,
sslContext: Option[SslContext],
services: Iterable[BindableService])(implicit materializer: ActorMaterializer)
services: Iterable[BindableService],
loggerFactory: NamedLoggerFactory)(implicit materializer: ActorMaterializer)
extends AutoCloseable {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
private val workerEventLoopGroup: NioEventLoopGroup = {
val parallelism = Runtime.getRuntime.availableProcessors

View File

@ -0,0 +1,56 @@
// Copyright (c) 2019 The DAML Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.common.logging
import org.slf4j.{Logger, LoggerFactory}
/**
 * A logger factory that decorates a regular class-based slf4j logger with a human-readable
 * "name" providing context on the class instance, e.g. identifying which participant out of
 * a set of participants emitted a particular log message.
 *
 * Names are built up incrementally, left to right, via [[append]].
 */
trait NamedLoggerFactory {

  /** The name carried by loggers created through this factory; may be empty. */
  val name: String

  /** Extends the current name with an additional component. Useful when handing the factory from one caller to a specific named callee. */
  def append(subName: String): NamedLoggerFactory

  def forParticipant(id: String): NamedLoggerFactory = append(s"participant/$id")

  private[logging] def getLogger(fullName: String): Logger

  /** Gets a logger in places where the NamedLogging trait cannot be mixed in,
    * such as factory methods.
    */
  def getLogger(klass: Class[_]): Logger =
    getLogger(Seq(klass.getName, name).filter(_.nonEmpty).mkString(":"))
}
// Default NamedLoggerFactory implementation backed directly by slf4j.
private[logging] final class SimpleNamedLoggerFactory(val name: String) extends NamedLoggerFactory {

  // Components are joined with "/"; an empty prefix is dropped so the
  // resulting name never starts with a leading slash.
  override def append(subName: String): NamedLoggerFactory = {
    val combined = if (name.isEmpty) subName else s"$name/$subName"
    new SimpleNamedLoggerFactory(combined)
  }

  override private[logging] def getLogger(fullName: String): Logger =
    LoggerFactory.getLogger(fullName)
}
// Companion providing the canonical ways to obtain a factory.
object NamedLoggerFactory {

  /** A factory with an empty name; further components can be appended later. */
  def root: NamedLoggerFactory = NamedLoggerFactory("")

  def apply(name: String): NamedLoggerFactory = new SimpleNamedLoggerFactory(name)

  def apply(cls: Class[_]): NamedLoggerFactory = apply(cls.getSimpleName)

  def forParticipant(name: String): NamedLoggerFactory = root.forParticipant(name)
}

View File

@ -9,58 +9,55 @@ import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Keep, Source}
import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.ledger.api.v1.command_completion_service.CommandCompletionServiceGrpc.CommandCompletionService
import com.digitalasset.ledger.api.v1.command_completion_service.{
CompletionEndResponse,
CompletionStreamRequest,
CompletionStreamResponse
}
import com.digitalasset.ledger.api.v1.command_service._
import com.digitalasset.ledger.api.v1.command_submission_service.CommandSubmissionServiceGrpc.CommandSubmissionService
import com.digitalasset.ledger.api.v1.command_submission_service.SubmitRequest
import com.digitalasset.ledger.api.v1.completion.Completion
import com.digitalasset.ledger.api.v1.transaction_service.TransactionServiceGrpc.TransactionService
import com.digitalasset.ledger.api.v1.transaction_service.{
GetFlatTransactionResponse,
GetTransactionByIdRequest,
GetTransactionResponse
}
import com.digitalasset.ledger.client.configuration.CommandClientConfiguration
import com.digitalasset.ledger.client.services.commands.{
CommandClient,
CommandCompletionSource,
CommandTrackerFlow
}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.server.api.ApiException
import com.digitalasset.platform.server.api.services.grpc.GrpcCommandService
import com.digitalasset.platform.server.services.command.ApiCommandService.LowLevelCommandServiceAccess
import com.digitalasset.util.Ctx
import com.digitalasset.util.akkastreams.MaxInFlight
import com.google.protobuf.empty.Empty
import io.grpc._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.Try
import scalaz.syntax.tag._
class ApiCommandService private (
lowLevelCommandServiceAccess: LowLevelCommandServiceAccess,
configuration: ApiCommandService.Configuration)(
configuration: ApiCommandService.Configuration,
loggerFactory: NamedLoggerFactory)(
implicit grpcExecutionContext: ExecutionContext,
actorMaterializer: ActorMaterializer,
esf: ExecutionSequencerFactory)
extends CommandServiceGrpc.CommandService
with AutoCloseable {
private val logger = LoggerFactory.getLogger(this.getClass.getName)
private val logger = loggerFactory.getLogger(this.getClass)
private type CommandId = String
private type ApplicationId = String
private val submissionTracker: TrackerMap = TrackerMap(configuration.retentionPeriod)
private val submissionTracker: TrackerMap =
TrackerMap(configuration.retentionPeriod, loggerFactory)
private val staleCheckerInterval: FiniteDuration = 30.seconds
private val trackerCleanupJob: Cancellable = actorMaterializer.system.scheduler.schedule(
@ -78,22 +75,6 @@ class ApiCommandService private (
submissionTracker.close()
}
private def commandClient(
applicationId: ApplicationId,
submissionStub: CommandSubmissionService,
completionStub: CommandCompletionService) =
new CommandClient(
submissionStub,
completionStub,
configuration.ledgerId,
applicationId,
CommandClientConfiguration(
configuration.maxCommandsInFlight,
configuration.maxParallelSubmissions,
false,
java.time.Duration.ofMillis(configuration.commandTtl.toMillis))
)
@SuppressWarnings(Array("org.wartremover.warts.Any"))
private def submitAndWaitInternal(request: SubmitAndWaitRequest): Future[Completion] = {
@ -106,12 +87,6 @@ class ApiCommandService private (
for {
trackingFlow <- {
lowLevelCommandServiceAccess match {
case LowLevelCommandServiceAccess.RemoteServices(submission, completion, _) =>
val client = commandClient(appId, submission, completion)
if (configuration.limitMaxCommandsInFlight)
client.trackCommands[Promise[Completion]](List(submitter.party))
else
client.trackCommandsUnbounded[Promise[Completion]](List(submitter.party))
case LowLevelCommandServiceAccess.LocalServices(
submissionFlow,
getCompletionSource,
@ -190,9 +165,6 @@ class ApiCommandService private (
private val (treeById, flatById) = {
lowLevelCommandServiceAccess match {
case LowLevelCommandServiceAccess.RemoteServices(_, _, transaction) =>
(transaction.getTransactionById _, transaction.getFlatTransactionById _)
case LowLevelCommandServiceAccess.LocalServices(
_,
_,
@ -206,13 +178,16 @@ class ApiCommandService private (
object ApiCommandService {
def create(configuration: Configuration, svcAccess: LowLevelCommandServiceAccess)(
def create(
configuration: Configuration,
svcAccess: LowLevelCommandServiceAccess,
loggerFactory: NamedLoggerFactory)(
implicit grpcExecutionContext: ExecutionContext,
actorMaterializer: ActorMaterializer,
esf: ExecutionSequencerFactory
): CommandServiceGrpc.CommandService with BindableService with CommandServiceLogging =
new GrpcCommandService(
new ApiCommandService(svcAccess, configuration),
new ApiCommandService(svcAccess, configuration, loggerFactory),
configuration.ledgerId
) with CommandServiceLogging
@ -230,12 +205,6 @@ object ApiCommandService {
object LowLevelCommandServiceAccess {
final case class RemoteServices(
submissionStub: CommandSubmissionService,
completionStub: CommandCompletionService,
transactionService: TransactionService)
extends LowLevelCommandServiceAccess
final case class LocalServices(
submissionFlow: Flow[
Ctx[(Promise[Completion], String), SubmitRequest],

View File

@ -7,10 +7,10 @@ import java.util.concurrent.atomic.AtomicReference
import com.digitalasset.ledger.api.v1.command_service.SubmitAndWaitRequest
import com.digitalasset.ledger.api.v1.completion.Completion
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.DirectExecutionContext
import com.digitalasset.platform.server.services.command.TrackerMap.{AsyncResource, Key}
import com.github.ghik.silencer.silent
import com.typesafe.scalalogging.LazyLogging
import scala.collection.immutable.HashMap
import scala.concurrent.duration.{FiniteDuration, _}
@ -21,7 +21,10 @@ import scala.util.{Failure, Success}
* A map for [[Tracker]]s with thread-safe tracking methods and automatic cleanup. A tracker tracker, if you will.
* @param retentionPeriod The minimum finite duration for which to retain idle trackers.
*/
class TrackerMap(retentionPeriod: FiniteDuration) extends AutoCloseable with LazyLogging {
class TrackerMap(retentionPeriod: FiniteDuration, loggerFactory: NamedLoggerFactory)
extends AutoCloseable {
private val logger = loggerFactory.getLogger(this.getClass)
private val lock = new Object()
@ -44,7 +47,7 @@ class TrackerMap(retentionPeriod: FiniteDuration) extends AutoCloseable with Laz
trackerResource.ifPresent(tracker =>
if (nanoTime - tracker.getLastSubmission > retentionNanos) {
logger.info(
s"Shutting down tracker for $submitter after inactivity of $retentionPeriod")
s"Shutting down tracker for ${submitter.toString} after inactivity of ${retentionPeriod.toString}")
remove(submitter)
tracker.close()
})
@ -65,7 +68,7 @@ class TrackerMap(retentionPeriod: FiniteDuration) extends AutoCloseable with Laz
val r = new AsyncResource(newTracker.map { t =>
logger.info("Registered tracker for submitter {}", submitter)
Tracker.WithLastSubmission(t)
})
}, loggerFactory)
trackerBySubmitter += submitter -> r
@ -96,7 +99,8 @@ object TrackerMap {
* A holder for an AutoCloseable that can be opened and closed async.
* If closed before the underlying Future completes, will close the resource on completion.
*/
class AsyncResource[T <: AutoCloseable](future: Future[T]) extends LazyLogging {
class AsyncResource[T <: AutoCloseable](future: Future[T], loggerFactory: NamedLoggerFactory) {
private val logger = loggerFactory.getLogger(this.getClass)
sealed trait AsnycResourceState
final case object Waiting extends AsnycResourceState
// the following silent is due to
@ -146,5 +150,6 @@ object TrackerMap {
}
}
def apply(retentionPeriod: FiniteDuration): TrackerMap = new TrackerMap(retentionPeriod)
def apply(retentionPeriod: FiniteDuration, loggerFactory: NamedLoggerFactory): TrackerMap =
new TrackerMap(retentionPeriod, loggerFactory)
}

View File

@ -14,20 +14,22 @@ import com.digitalasset.ledger.api.v1.ledger_identity_service.{
LedgerIdentityServiceLogging
}
import com.digitalasset.platform.api.grpc.GrpcApiService
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.server.api.ApiException
import com.digitalasset.platform.common.util.DirectExecutionContext
import io.grpc.{BindableService, ServerServiceDefinition, Status}
import org.slf4j.{Logger, LoggerFactory}
import org.slf4j.Logger
import scalaz.syntax.tag._
import scala.concurrent.Future
abstract class ApiLedgerIdentityService private (getLedgerId: () => Future[LedgerId])
abstract class ApiLedgerIdentityService private (
getLedgerId: () => Future[LedgerId],
protected val logger: Logger)
extends GrpcLedgerIdentityService
with GrpcApiService {
protected val logger: Logger = LoggerFactory.getLogger(getClass)
@volatile var closed = false
override def getLedgerIdentity(
@ -48,7 +50,11 @@ abstract class ApiLedgerIdentityService private (getLedgerId: () => Future[Ledge
}
object ApiLedgerIdentityService {
def create(getLedgerId: () => Future[LedgerId])
: ApiLedgerIdentityService with BindableService with LedgerIdentityServiceLogging =
new ApiLedgerIdentityService(getLedgerId) with LedgerIdentityServiceLogging
def create(getLedgerId: () => Future[LedgerId], loggerFactory: NamedLoggerFactory)
: ApiLedgerIdentityService with BindableService with LedgerIdentityServiceLogging = {
val loggerOverride = loggerFactory.getLogger(GrpcLedgerIdentityService.getClass)
new ApiLedgerIdentityService(getLedgerId, loggerOverride) with LedgerIdentityServiceLogging {
override protected val logger: Logger = loggerOverride
}
}
}

View File

@ -15,11 +15,13 @@ import com.digitalasset.ledger.api.v1.testing.time_service.TimeServiceGrpc.TimeS
import com.digitalasset.ledger.api.v1.testing.time_service._
import com.digitalasset.platform.akkastreams.dispatcher.SignalDispatcher
import com.digitalasset.platform.api.grpc.GrpcApiService
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.DirectExecutionContext
import com.digitalasset.platform.server.api.validation.FieldValidations
import com.google.protobuf.empty.Empty
import io.grpc.{BindableService, ServerServiceDefinition, Status, StatusRuntimeException}
import org.slf4j.LoggerFactory
import org.slf4j.Logger
import scalaz.syntax.tag._
import scala.concurrent.{ExecutionContext, Future}
@ -27,7 +29,8 @@ import scala.util.control.NoStackTrace
class ApiTimeService private (
val ledgerId: LedgerId,
backend: TimeServiceBackend
backend: TimeServiceBackend,
protected val logger: Logger
)(
implicit grpcExecutionContext: ExecutionContext,
protected val mat: Materializer,
@ -36,8 +39,6 @@ class ApiTimeService private (
with FieldValidations
with GrpcApiService {
protected val logger = LoggerFactory.getLogger(TimeServiceGrpc.TimeService.getClass)
logger.debug(
"{} initialized with ledger ID {}, start time {}",
this.getClass.getSimpleName,
@ -128,9 +129,13 @@ class ApiTimeService private (
}
object ApiTimeService {
def create(ledgerId: LedgerId, backend: TimeServiceBackend)(
def create(ledgerId: LedgerId, backend: TimeServiceBackend, loggerFactory: NamedLoggerFactory)(
implicit grpcExecutionContext: ExecutionContext,
mat: Materializer,
esf: ExecutionSequencerFactory): TimeService with BindableService with TimeServiceLogging =
new ApiTimeService(ledgerId, backend) with TimeServiceLogging
esf: ExecutionSequencerFactory): TimeService with BindableService with TimeServiceLogging = {
val loggerOverride = loggerFactory.getLogger(TimeServiceGrpc.TimeService.getClass)
new ApiTimeService(ledgerId, backend, loggerOverride) with TimeServiceLogging {
override protected val logger = loggerOverride
}
}
}

View File

@ -18,6 +18,7 @@ import com.digitalasset.daml.lf.engine._
import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.v1.command_completion_service.CompletionEndRequest
import com.digitalasset.ledger.client.services.commands.CommandSubmissionFlow
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.sandbox.config.CommandConfiguration
import com.digitalasset.platform.sandbox.services._
import com.digitalasset.platform.sandbox.services.admin.ApiPackageManagementService
@ -27,9 +28,9 @@ import com.digitalasset.platform.sandbox.stores.ledger.CommandExecutorImpl
import com.digitalasset.platform.server.services.command.ApiCommandService
import com.digitalasset.platform.server.services.identity.ApiLedgerIdentityService
import com.digitalasset.platform.server.services.testing.{ApiTimeService, TimeServiceBackend}
import io.grpc.BindableService
import io.grpc.protobuf.services.ProtoReflectionService
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import scala.collection.immutable
@ -56,8 +57,6 @@ private case class ApiServicesBundle(services: immutable.Seq[BindableService]) e
object ApiServices {
private val logger = LoggerFactory.getLogger(this.getClass)
def create(
writeService: WriteService,
indexService: IndexService,
@ -65,7 +64,8 @@ object ApiServices {
timeProvider: TimeProvider,
timeModel: TimeModel,
commandConfig: CommandConfiguration,
optTimeServiceBackend: Option[TimeServiceBackend])(
optTimeServiceBackend: Option[TimeServiceBackend],
loggerFactory: NamedLoggerFactory)(
implicit mat: ActorMaterializer,
esf: ExecutionSequencerFactory): Future[ApiServices] = {
implicit val ec: ExecutionContext = mat.system.dispatcher
@ -88,25 +88,26 @@ object ApiServices {
writeService,
timeModel,
timeProvider,
new CommandExecutorImpl(engine, packagesService.getLfPackage)
new CommandExecutorImpl(engine, packagesService.getLfPackage),
loggerFactory
)
logger.info(EngineInfo.show)
loggerFactory.getLogger(this.getClass).info(EngineInfo.show)
val apiTransactionService =
ApiTransactionService.create(ledgerId, transactionsService)
ApiTransactionService.create(ledgerId, transactionsService, loggerFactory)
val apiLedgerIdentityService =
ApiLedgerIdentityService.create(() => identityService.getLedgerId())
ApiLedgerIdentityService.create(() => identityService.getLedgerId(), loggerFactory)
val apiPackageService = ApiPackageService.create(ledgerId, packagesService)
val apiPackageService = ApiPackageService.create(ledgerId, packagesService, loggerFactory)
val apiConfigurationService =
ApiLedgerConfigurationService.create(ledgerId, configurationService)
ApiLedgerConfigurationService.create(ledgerId, configurationService, loggerFactory)
val apiCompletionService =
ApiCommandCompletionService
.create(ledgerId, completionsService)
.create(ledgerId, completionsService, loggerFactory)
val apiCommandService = ApiCommandService.create(
ApiCommandService.Configuration(
@ -126,11 +127,12 @@ object ApiServices {
() => apiCompletionService.completionEnd(CompletionEndRequest(ledgerId.unwrap)),
apiTransactionService.getTransactionById,
apiTransactionService.getFlatTransactionById
)
),
loggerFactory
)
val apiActiveContractsService =
ApiActiveContractsService.create(ledgerId, activeContractsService)
ApiActiveContractsService.create(ledgerId, activeContractsService, loggerFactory)
val apiReflectionService = ProtoReflectionService.newInstance()
@ -138,16 +140,17 @@ object ApiServices {
optTimeServiceBackend.map { tsb =>
ApiTimeService.create(
ledgerId,
tsb
tsb,
loggerFactory
)
}
val apiPartyManagementService =
ApiPartyManagementService
.createApiService(partyManagementService, writeService)
.createApiService(partyManagementService, writeService, loggerFactory)
val apiPackageManagementService =
ApiPackageManagementService.createApiService(indexService, writeService)
ApiPackageManagementService.createApiService(indexService, writeService, loggerFactory)
new ApiServicesBundle(
apiTimeServiceOpt.toList :::

View File

@ -10,13 +10,13 @@ import java.util.concurrent.TimeUnit
import akka.stream.ActorMaterializer
import com.digitalasset.grpc.adapter.{AkkaExecutionSequencerPool, ExecutionSequencerFactory}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import io.grpc.netty.NettyServerBuilder
import io.grpc.ServerInterceptor
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.handler.ssl.SslContext
import io.netty.util.concurrent.DefaultThreadFactory
import io.netty.channel.socket.nio.NioServerSocketChannel
import org.slf4j.LoggerFactory
import scala.concurrent.{Future, Promise}
import scala.util.control.NoStackTrace
@ -37,6 +37,7 @@ object LedgerApiServer {
desiredPort: Int,
maxInboundMessageSize: Int,
address: Option[String],
loggerFactory: NamedLoggerFactory,
sslContext: Option[SslContext] = None,
interceptors: List[ServerInterceptor] = List.empty)(
implicit mat: ActorMaterializer): Future[ApiServer] = {
@ -58,6 +59,7 @@ object LedgerApiServer {
desiredPort,
maxInboundMessageSize,
address,
loggerFactory,
sslContext,
interceptors
)
@ -83,11 +85,12 @@ private class LedgerApiServer(
desiredPort: Int,
maxInboundMessageSize: Int,
address: Option[String],
loggerFactory: NamedLoggerFactory,
sslContext: Option[SslContext] = None,
interceptors: List[ServerInterceptor] = List.empty)(implicit mat: ActorMaterializer)
extends ApiServer {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
class UnableToBind(port: Int, cause: Throwable)
extends RuntimeException(

View File

@ -8,8 +8,9 @@ import akka.stream.Materializer
import akka.stream.scaladsl.Source
import com.daml.ledger.participant.state.index.v2
import com.daml.ledger.participant.state.index.v2.IndexService
import com.daml.ledger.participant.state.v1.{ReadService, ParticipantId}
import com.daml.ledger.participant.state.v1.{ParticipantId, ReadService}
import com.digitalasset.ledger.api.domain.{ParticipantId => _, _}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import com.digitalasset.platform.sandbox.metrics.MetricsManager
import com.digitalasset.platform.sandbox.stores.LedgerBackedIndexService
@ -26,11 +27,12 @@ object JdbcIndex {
readService: ReadService,
ledgerId: LedgerId,
participantId: ParticipantId,
jdbcUrl: String)(
jdbcUrl: String,
loggerFactory: NamedLoggerFactory)(
implicit mat: Materializer,
mm: MetricsManager): Future[IndexService with AutoCloseable] =
Ledger
.jdbcBackedReadOnly(jdbcUrl, ledgerId)
.jdbcBackedReadOnly(jdbcUrl, ledgerId, loggerFactory)
.map { ledger =>
val contractStore = new SandboxContractStore(ledger)
new LedgerBackedIndexService(MeteredReadOnlyLedger(ledger), contractStore, participantId) {

View File

@ -19,6 +19,7 @@ import com.digitalasset.daml.lf.value.Value.{AbsoluteContractId, ContractId}
import com.digitalasset.daml_lf.DamlLf
import com.digitalasset.ledger.api.domain
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import com.digitalasset.platform.sandbox.metrics.MetricsManager
import com.digitalasset.platform.sandbox.services.transaction.SandboxEventIdFormatter
@ -41,7 +42,6 @@ import com.digitalasset.platform.sandbox.stores.ledger.sql.serialisation.{
ValueSerializer
}
import com.digitalasset.platform.sandbox.stores.ledger.sql.util.DbDispatcher
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import scala.concurrent.duration._
@ -52,22 +52,23 @@ final abstract class Initialized extends InitStatus
final abstract class Uninitialized extends InitStatus
object JdbcIndexerFactory {
def apply(): JdbcIndexerFactory[Uninitialized] = new JdbcIndexerFactory[Uninitialized]()
def apply(loggerFactory: NamedLoggerFactory): JdbcIndexerFactory[Uninitialized] =
new JdbcIndexerFactory[Uninitialized](loggerFactory)
}
class JdbcIndexerFactory[Status <: InitStatus] private () {
private val logger = LoggerFactory.getLogger(classOf[JdbcIndexer])
class JdbcIndexerFactory[Status <: InitStatus] private (loggerFactory: NamedLoggerFactory) {
private val logger = loggerFactory.getLogger(classOf[JdbcIndexer])
private[index] val asyncTolerance = 30.seconds
def validateSchema(jdbcUrl: String)(
implicit x: Status =:= Uninitialized): JdbcIndexerFactory[Initialized] = {
FlywayMigrations(jdbcUrl).validate()
FlywayMigrations(jdbcUrl, loggerFactory).validate()
this.asInstanceOf[JdbcIndexerFactory[Initialized]]
}
def migrateSchema(jdbcUrl: String)(
implicit x: Status =:= Uninitialized): JdbcIndexerFactory[Initialized] = {
FlywayMigrations(jdbcUrl).migrate()
FlywayMigrations(jdbcUrl, loggerFactory).migrate()
this.asInstanceOf[JdbcIndexerFactory[Initialized]]
}
@ -107,7 +108,8 @@ class JdbcIndexerFactory[Status <: InitStatus] private () {
DbDispatcher(
jdbcUrl,
if (dbType.supportsParallelWrites) defaultNumberOfShortLivedConnections else 1,
defaultNumberOfStreamingConnections)
defaultNumberOfStreamingConnections,
loggerFactory)
val ledgerDao = LedgerDao.metered(
JdbcLedgerDao(
dbDispatcher,
@ -115,7 +117,8 @@ class JdbcIndexerFactory[Status <: InitStatus] private () {
TransactionSerializer,
ValueSerializer,
KeyHasher,
dbType))(mm)
dbType,
loggerFactory))(mm)
ledgerDao
}

View File

@ -7,8 +7,8 @@ import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference}
import akka.actor.Scheduler
import akka.pattern.after
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import org.slf4j.LoggerFactory
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{Await, ExecutionContext, Future}
@ -18,8 +18,9 @@ object RecoveringIndexer {
def apply(
scheduler: Scheduler,
restartDelay: FiniteDuration,
asyncTolerance: FiniteDuration): RecoveringIndexer =
new RecoveringIndexer(scheduler, restartDelay, asyncTolerance)
asyncTolerance: FiniteDuration,
loggerFactory: NamedLoggerFactory): RecoveringIndexer =
new RecoveringIndexer(scheduler, restartDelay, asyncTolerance, loggerFactory)
}
/**
@ -31,9 +32,10 @@ object RecoveringIndexer {
class RecoveringIndexer(
scheduler: Scheduler,
restartDelay: FiniteDuration,
asyncTolerance: FiniteDuration)
asyncTolerance: FiniteDuration,
loggerFactory: NamedLoggerFactory)
extends AutoCloseable {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
val closed = new AtomicBoolean(false)
val lastHandle = new AtomicReference[Option[IndexFeedHandle]](None)

View File

@ -15,17 +15,13 @@ import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.domain
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.ledger.server.apiserver.{ApiServer, ApiServices, LedgerApiServer}
import com.digitalasset.platform.index.StandaloneIndexServer.{
asyncTolerance,
logger,
preloadPackages
}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.index.StandaloneIndexServer.{asyncTolerance, preloadPackages}
import com.digitalasset.platform.index.config.Config
import com.digitalasset.platform.sandbox.BuildInfo
import com.digitalasset.platform.sandbox.config.SandboxConfig
import com.digitalasset.platform.sandbox.metrics.MetricsManager
import com.digitalasset.platform.sandbox.stores.InMemoryPackageStore
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
@ -34,18 +30,19 @@ import scala.util.Try
// Main entry point to start an index server that also hosts the ledger API.
// See v2.ReferenceServer on how it is used.
object StandaloneIndexServer {
private val logger = LoggerFactory.getLogger(this.getClass)
private val asyncTolerance = 30.seconds
def apply(
config: Config,
readService: ReadService,
writeService: WriteService): StandaloneIndexServer =
writeService: WriteService,
loggerFactory: NamedLoggerFactory): StandaloneIndexServer =
new StandaloneIndexServer(
"index",
config,
readService,
writeService
writeService,
loggerFactory
)
private val engine = Engine()
@ -76,7 +73,9 @@ class StandaloneIndexServer(
actorSystemName: String,
config: Config,
readService: ReadService,
writeService: WriteService) {
writeService: WriteService,
loggerFactory: NamedLoggerFactory) {
private val logger = loggerFactory.getLogger(this.getClass)
// Name of this participant,
val participantId: ParticipantId = config.participantId
@ -141,7 +140,8 @@ class StandaloneIndexServer(
readService,
domain.LedgerId(cond.ledgerId),
participantId,
config.jdbcUrl)
config.jdbcUrl,
loggerFactory)
} yield (cond.ledgerId, cond.config.timeModel, indexService)
val (actualLedgerId, timeModel, indexService) = Try(Await.result(initF, asyncTolerance))
@ -162,10 +162,12 @@ class StandaloneIndexServer(
config.timeProvider,
timeModel,
SandboxConfig.defaultCommandConfig,
None)(am, esf),
None,
loggerFactory)(am, esf),
config.port,
config.maxInboundMessageSize,
None,
loggerFactory,
config.tlsConfig.flatMap(_.server)
),
asyncTolerance

View File

@ -5,6 +5,7 @@ package com.digitalasset.platform.index
import akka.actor.ActorSystem
import com.daml.ledger.participant.state.v1.ReadService
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import com.digitalasset.platform.index.config.{Config, StartupMode}
@ -15,11 +16,18 @@ import scala.concurrent.duration._
object StandaloneIndexerServer {
private[this] val actorSystem = ActorSystem("StandaloneIndexerServer")
def apply(readService: ReadService, config: Config): AutoCloseable = {
def apply(
readService: ReadService,
config: Config,
loggerFactory: NamedLoggerFactory): AutoCloseable = {
val indexerFactory = JdbcIndexerFactory()
val indexerFactory = JdbcIndexerFactory(loggerFactory)
val indexer =
RecoveringIndexer(actorSystem.scheduler, 10.seconds, indexerFactory.asyncTolerance)
RecoveringIndexer(
actorSystem.scheduler,
10.seconds,
indexerFactory.asyncTolerance,
loggerFactory)
config.startupMode match {
case StartupMode.MigrateOnly =>

View File

@ -16,6 +16,7 @@ import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.ledger.server.apiserver.{ApiServer, ApiServices, LedgerApiServer}
import com.digitalasset.platform.common.LedgerIdMode
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.sandbox.SandboxServer.{asyncTolerance, createInitialState, logger}
import com.digitalasset.platform.sandbox.banner.Banner
import com.digitalasset.platform.sandbox.config.SandboxConfig
@ -150,11 +151,15 @@ class SandboxServer(actorSystemName: String, config: => SandboxConfig) extends A
def port: Int = sandboxState.apiServerState.port
/** the reset service is special, since it triggers a server shutdown */
private def resetService(ledgerId: LedgerId): SandboxResetService = new SandboxResetService(
ledgerId,
() => sandboxState.infra.executionContext,
() => sandboxState.resetAndRestartServer()
)
private def resetService(
ledgerId: LedgerId,
loggerFactory: NamedLoggerFactory): SandboxResetService =
new SandboxResetService(
ledgerId,
() => sandboxState.infra.executionContext,
() => sandboxState.resetAndRestartServer(),
loggerFactory
)
sandboxState = start()
@ -184,6 +189,8 @@ class SandboxServer(actorSystemName: String, config: => SandboxConfig) extends A
(ts, Some(ts))
}
val loggerFactory = NamedLoggerFactory.forParticipant(participantId)
val (ledgerType, indexAndWriteServiceF) = config.jdbcUrl match {
case Some(jdbcUrl) =>
"postgres" -> SandboxIndexAndWriteService.postgres(
@ -196,7 +203,8 @@ class SandboxServer(actorSystemName: String, config: => SandboxConfig) extends A
ledgerEntries,
startMode,
config.commandConfig.maxCommandsInFlight * 2, // we can get commands directly as well on the submission service
packageStore
packageStore,
loggerFactory
)
case None =>
@ -235,15 +243,17 @@ class SandboxServer(actorSystemName: String, config: => SandboxConfig) extends A
TimeServiceBackend.withObserver(
_,
indexAndWriteService.publishHeartbeat
))
)),
loggerFactory
)(am, esf)
.map(_.withServices(List(resetService(ledgerId)))),
.map(_.withServices(List(resetService(ledgerId, loggerFactory)))),
// NOTE(JM): Re-use the same port after reset.
Option(sandboxState).fold(config.port)(_.apiServerState.port),
config.maxInboundMessageSize,
config.address,
loggerFactory,
config.tlsConfig.flatMap(_.server),
List(resetService(ledgerId))
List(resetService(ledgerId, loggerFactory))
),
asyncTolerance
)

View File

@ -17,11 +17,12 @@ import com.digitalasset.ledger.api.v1.active_contracts_service._
import com.digitalasset.ledger.api.v1.event.CreatedEvent
import com.digitalasset.ledger.api.validation.TransactionFilterValidator
import com.digitalasset.platform.api.grpc.GrpcApiService
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.DirectExecutionContext
import com.digitalasset.platform.participant.util.LfEngineToApi
import com.digitalasset.platform.server.api.validation.ActiveContractsServiceValidation
import io.grpc.{BindableService, ServerServiceDefinition}
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext
@ -29,14 +30,16 @@ import scalaz.syntax.tag._
class ApiActiveContractsService private (
backend: ACSBackend,
parallelism: Int = Runtime.getRuntime.availableProcessors)(
parallelism: Int = Runtime.getRuntime.availableProcessors,
loggerFactory: NamedLoggerFactory)(
implicit executionContext: ExecutionContext,
protected val mat: Materializer,
protected val esf: ExecutionSequencerFactory)
extends ActiveContractsServiceAkkaGrpc
with GrpcApiService {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
@SuppressWarnings(Array("org.wartremover.warts.Option2Iterable"))
override protected def getActiveContractsSource(
request: GetActiveContractsRequest): Source[GetActiveContractsResponse, NotUsed] = {
@ -95,13 +98,13 @@ object ApiActiveContractsService {
type TransactionId = String
type WorkflowId = String
def create(ledgerId: LedgerId, backend: ACSBackend)(
def create(ledgerId: LedgerId, backend: ACSBackend, loggerFactory: NamedLoggerFactory)(
implicit ec: ExecutionContext,
mat: Materializer,
esf: ExecutionSequencerFactory)
: ActiveContractsService with BindableService with ActiveContractsServiceLogging =
new ActiveContractsServiceValidation(
new ApiActiveContractsService(backend)(ec, mat, esf),
new ApiActiveContractsService(backend, loggerFactory = loggerFactory)(ec, mat, esf),
ledgerId
) with BindableService with ActiveContractsServiceLogging {
override def bindService(): ServerServiceDefinition =

View File

@ -15,24 +15,25 @@ import com.digitalasset.ledger.api.domain.{CompletionEvent, LedgerId, LedgerOffs
import com.digitalasset.ledger.api.messages.command.completion.CompletionStreamRequest
import com.digitalasset.ledger.api.v1.command_completion_service._
import com.digitalasset.ledger.api.validation.PartyNameChecker
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.DirectExecutionContext
import com.digitalasset.platform.participant.util.Slf4JLog
import com.digitalasset.platform.server.api.services.domain.CommandCompletionService
import com.digitalasset.platform.server.api.services.grpc.GrpcCommandCompletionService
import io.grpc.{BindableService, ServerServiceDefinition}
import org.slf4j.LoggerFactory
import org.slf4j.Logger
import scala.concurrent.{ExecutionContext, Future}
class ApiCommandCompletionService private (
completionsService: IndexCompletionsService
completionsService: IndexCompletionsService,
loggerFactory: NamedLoggerFactory
)(
implicit ec: ExecutionContext,
protected val mat: Materializer,
protected val esf: ExecutionSequencerFactory)
extends CommandCompletionService {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
private val subscriptionIdCounter = new AtomicLong()
@ -58,18 +59,22 @@ class ApiCommandCompletionService private (
}
object ApiCommandCompletionService {
def create(ledgerId: LedgerId, completionsService: IndexCompletionsService)(
def create(
ledgerId: LedgerId,
completionsService: IndexCompletionsService,
loggerFactory: NamedLoggerFactory)(
implicit ec: ExecutionContext,
mat: Materializer,
esf: ExecutionSequencerFactory): GrpcCommandCompletionService
with BindableService
with AutoCloseable
with CommandCompletionServiceLogging = {
val impl: CommandCompletionService = new ApiCommandCompletionService(completionsService)
val impl: CommandCompletionService =
new ApiCommandCompletionService(completionsService, loggerFactory)
new GrpcCommandCompletionService(ledgerId, impl, PartyNameChecker.AllowAllParties)
with BindableService with CommandCompletionServiceLogging {
override val logger = LoggerFactory.getLogger(impl.getClass)
override val logger: Logger = loggerFactory.getLogger(impl.getClass)
override def bindService(): ServerServiceDefinition =
CommandCompletionServiceGrpc.bindService(this, DirectExecutionContext)
}

View File

@ -12,10 +12,12 @@ import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.ledger.api.v1.ledger_configuration_service._
import com.digitalasset.platform.api.grpc.GrpcApiService
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.DirectExecutionContext
import com.digitalasset.platform.server.api.validation.LedgerConfigurationServiceValidation
import io.grpc.{BindableService, ServerServiceDefinition}
import io.grpc.{BindableService, ServerServiceDefinition}
import org.slf4j.Logger
import scala.concurrent.ExecutionContext
class ApiLedgerConfigurationService private (configurationService: IndexConfigurationService)(
@ -42,7 +44,10 @@ class ApiLedgerConfigurationService private (configurationService: IndexConfigur
}
object ApiLedgerConfigurationService {
def create(ledgerId: LedgerId, configurationService: IndexConfigurationService)(
def create(
ledgerId: LedgerId,
configurationService: IndexConfigurationService,
loggerFactory: NamedLoggerFactory)(
implicit ec: ExecutionContext,
esf: ExecutionSequencerFactory,
mat: Materializer)
@ -50,6 +55,8 @@ object ApiLedgerConfigurationService {
new LedgerConfigurationServiceValidation(
new ApiLedgerConfigurationService(configurationService),
ledgerId) with BindableService with LedgerConfigurationServiceLogging {
override protected val logger: Logger =
loggerFactory.getLogger(ApiLedgerConfigurationService.getClass)
override def bindService(): ServerServiceDefinition =
LedgerConfigurationServiceGrpc.bindService(this, DirectExecutionContext)
}

View File

@ -16,11 +16,13 @@ import com.digitalasset.ledger.api.v1.package_service.{
import com.digitalasset.platform.api.grpc.GrpcApiService
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import com.digitalasset.platform.server.api.validation.PackageServiceValidation
import io.grpc.{BindableService, ServerServiceDefinition, Status}
import com.digitalasset.ledger.api.v1.package_service.HashFunction.{
SHA256 => APISHA256,
Unrecognized => APIUnrecognized
}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import scala.concurrent.{ExecutionContext, Future}
@ -85,10 +87,12 @@ class ApiPackageService private (backend: IndexPackagesService)
}
object ApiPackageService {
def create(ledgerId: LedgerId, backend: IndexPackagesService)(implicit ec: ExecutionContext)
def create(ledgerId: LedgerId, backend: IndexPackagesService, loggerFactory: NamedLoggerFactory)(
implicit ec: ExecutionContext)
: PackageService with BindableService with PackageServiceLogging =
new PackageServiceValidation(new ApiPackageService(backend), ledgerId) with BindableService
with PackageServiceLogging {
override protected val logger = loggerFactory.getLogger(PackageService.getClass)
override def bindService(): ServerServiceDefinition =
PackageServiceGrpc.bindService(this, DEC)
}

View File

@ -24,13 +24,14 @@ import com.digitalasset.daml.lf.transaction.Transaction.Transaction
import com.digitalasset.grpc.adapter.utils.DirectExecutionContext
import com.digitalasset.ledger.api.domain.{LedgerId, Commands => ApiCommands}
import com.digitalasset.ledger.api.messages.command.submission.SubmitRequest
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.sandbox.stores.ledger.{CommandExecutor, ErrorCause}
import com.digitalasset.platform.server.api.services.domain.CommandSubmissionService
import com.digitalasset.platform.server.api.services.grpc.GrpcCommandSubmissionService
import com.digitalasset.platform.server.api.validation.ErrorFactories
import com.digitalasset.platform.server.services.command.time.TimeModelValidator
import io.grpc.{BindableService, Status}
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import scala.compat.java8.FutureConverters
@ -47,7 +48,8 @@ object ApiSubmissionService {
writeService: WriteService,
timeModel: TimeModel,
timeProvider: TimeProvider,
commandExecutor: CommandExecutor)(implicit ec: ExecutionContext, mat: ActorMaterializer)
commandExecutor: CommandExecutor,
loggerFactory: NamedLoggerFactory)(implicit ec: ExecutionContext, mat: ActorMaterializer)
: GrpcCommandSubmissionService with BindableService with CommandSubmissionServiceLogging =
new GrpcCommandSubmissionService(
new ApiSubmissionService(
@ -55,7 +57,8 @@ object ApiSubmissionService {
writeService,
timeModel,
timeProvider,
commandExecutor),
commandExecutor,
loggerFactory),
ledgerId
) with CommandSubmissionServiceLogging
@ -70,12 +73,13 @@ class ApiSubmissionService private (
writeService: WriteService,
timeModel: TimeModel,
timeProvider: TimeProvider,
commandExecutor: CommandExecutor)(implicit ec: ExecutionContext, mat: ActorMaterializer)
commandExecutor: CommandExecutor,
loggerFactory: NamedLoggerFactory)(implicit ec: ExecutionContext, mat: ActorMaterializer)
extends CommandSubmissionService
with ErrorFactories
with AutoCloseable {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
private val validator = TimeModelValidator(timeModel)
override def submit(request: SubmitRequest): Future[Unit] = {

View File

@ -11,18 +11,20 @@ import com.google.protobuf.empty.Empty
import io.grpc._
import io.grpc.ServerCall.Listener
import java.util.concurrent.atomic.AtomicBoolean
import org.slf4j.LoggerFactory
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import scala.concurrent.{ExecutionContext, Future, Promise}
class SandboxResetService(
ledgerId: LedgerId,
getEc: () => ExecutionContext,
resetAndRestartServer: () => Future[Unit])
resetAndRestartServer: () => Future[Unit],
loggerFactory: NamedLoggerFactory)
extends ResetServiceGrpc.ResetService
with BindableService
with ServerInterceptor {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
private val resetInitialized = new AtomicBoolean(false)

View File

@ -15,11 +15,13 @@ import com.digitalasset.daml_lf.DamlLf.Archive
import com.digitalasset.ledger.api.v1.admin.package_management_service.PackageManagementServiceGrpc.PackageManagementService
import com.digitalasset.ledger.api.v1.admin.package_management_service._
import com.digitalasset.platform.api.grpc.GrpcApiService
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DE}
import com.digitalasset.platform.server.api.validation.ErrorFactories
import com.google.protobuf.timestamp.Timestamp
import io.grpc.ServerServiceDefinition
import org.slf4j.{Logger, LoggerFactory}
import org.slf4j.Logger
import scala.compat.java8.FutureConverters
import scala.concurrent.Future
@ -29,11 +31,12 @@ import scala.util.Try
class ApiPackageManagementService(
packagesIndex: IndexPackagesService,
packagesWrite: WritePackagesService,
scheduler: Scheduler)
scheduler: Scheduler,
loggerFactory: NamedLoggerFactory)
extends PackageManagementService
with GrpcApiService {
protected val logger: Logger = LoggerFactory.getLogger(PackageManagementService.getClass)
protected val logger: Logger = loggerFactory.getLogger(PackageManagementService.getClass)
override def close(): Unit = ()
@ -105,7 +108,8 @@ class ApiPackageManagementService(
50.milliseconds,
500.milliseconds,
d => d * 2,
scheduler)
scheduler,
loggerFactory)
.map { numberOfAttempts =>
logger.debug(
s"All ${ids.length} packages available, read after $numberOfAttempts attempt(s)")
@ -115,8 +119,10 @@ class ApiPackageManagementService(
}
object ApiPackageManagementService {
def createApiService(readBackend: IndexPackagesService, writeBackend: WritePackagesService)(
implicit mat: ActorMaterializer): GrpcApiService =
new ApiPackageManagementService(readBackend, writeBackend, mat.system.scheduler)
def createApiService(
readBackend: IndexPackagesService,
writeBackend: WritePackagesService,
loggerFactory: NamedLoggerFactory)(implicit mat: ActorMaterializer): GrpcApiService =
new ApiPackageManagementService(readBackend, writeBackend, mat.system.scheduler, loggerFactory)
with PackageManagementServiceLogging
}

View File

@ -11,10 +11,11 @@ import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.v1.admin.party_management_service.PartyManagementServiceGrpc.PartyManagementService
import com.digitalasset.ledger.api.v1.admin.party_management_service._
import com.digitalasset.platform.api.grpc.GrpcApiService
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DE}
import com.digitalasset.platform.server.api.validation.ErrorFactories
import io.grpc.ServerServiceDefinition
import org.slf4j.LoggerFactory
import scala.compat.java8.FutureConverters
import scala.concurrent.duration.DurationInt
@ -23,11 +24,12 @@ import scala.concurrent.{ExecutionContext, Future}
class ApiPartyManagementService private (
partyManagementService: IndexPartyManagementService,
writeService: WritePartyService,
scheduler: Scheduler
scheduler: Scheduler,
loggerFactory: NamedLoggerFactory
) extends PartyManagementService
with GrpcApiService {
protected val logger = LoggerFactory.getLogger(this.getClass)
protected val logger = loggerFactory.getLogger(this.getClass)
override def close(): Unit = ()
@ -70,7 +72,8 @@ class ApiPartyManagementService private (
50.milliseconds,
500.milliseconds,
d => d * 2,
scheduler)
scheduler,
loggerFactory)
.map { numberOfAttempts =>
logger.debug(s"Party $newParty available, read after $numberOfAttempts attempt(s)")
result
@ -102,11 +105,14 @@ class ApiPartyManagementService private (
}
object ApiPartyManagementService {
def createApiService(readBackend: IndexPartyManagementService, writeBackend: WritePartyService)(
def createApiService(
readBackend: IndexPartyManagementService,
writeBackend: WritePartyService,
loggerFactory: NamedLoggerFactory)(
implicit ec: ExecutionContext,
esf: ExecutionSequencerFactory,
mat: ActorMaterializer): GrpcApiService =
new ApiPartyManagementService(readBackend, writeBackend, mat.system.scheduler)
new ApiPartyManagementService(readBackend, writeBackend, mat.system.scheduler, loggerFactory)
with PartyManagementServiceLogging
}

View File

@ -5,16 +5,14 @@ package com.digitalasset.platform.sandbox.services.admin
import akka.actor.Scheduler
import akka.pattern.after
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DE}
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.Future
import scala.concurrent.duration.{DurationInt, FiniteDuration}
object PollingUtils {
private val logger: Logger = LoggerFactory.getLogger(PollingUtils.getClass)
/**
* Continuously polls the given service to check if the given item has been persisted.
*
@ -41,7 +39,11 @@ object PollingUtils {
minWait: FiniteDuration,
maxWait: FiniteDuration,
backoffProgression: FiniteDuration => FiniteDuration,
scheduler: Scheduler): Future[Int] = {
scheduler: Scheduler,
loggerFactory: NamedLoggerFactory): Future[Int] = {
val logger = loggerFactory.getLogger(this.getClass)
def go(attempt: Int, waitTime: FiniteDuration): Future[Int] = {
logger.debug(s"Polling for '$description' being persisted (attempt #$attempt)...")
poll()

View File

@ -18,22 +18,26 @@ import com.digitalasset.platform.sandbox.services.transaction.SandboxEventIdForm
import com.digitalasset.platform.server.api.services.domain.TransactionService
import com.digitalasset.platform.server.api.services.grpc.GrpcTransactionService
import com.digitalasset.platform.server.api.validation.ErrorFactories
import io.grpc._
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import com.digitalasset.ledger.api.v1.transaction_service.{TransactionServiceLogging}
import com.digitalasset.ledger.api.v1.transaction_service.TransactionServiceLogging
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import scala.concurrent.{ExecutionContext, Future}
object ApiTransactionService {
def create(ledgerId: LedgerId, transactionsService: IndexTransactionsService)(
def create(
ledgerId: LedgerId,
transactionsService: IndexTransactionsService,
loggerFactory: NamedLoggerFactory)(
implicit ec: ExecutionContext,
mat: Materializer,
esf: ExecutionSequencerFactory)
: GrpcTransactionService with BindableService with TransactionServiceLogging =
new GrpcTransactionService(
new ApiTransactionService(transactionsService),
new ApiTransactionService(transactionsService, loggerFactory),
ledgerId,
PartyNameChecker.AllowAllParties
) with TransactionServiceLogging
@ -41,6 +45,7 @@ object ApiTransactionService {
class ApiTransactionService private (
transactionsService: IndexTransactionsService,
loggerFactory: NamedLoggerFactory,
parallelism: Int = 4)(
implicit executionContext: ExecutionContext,
materializer: Materializer,
@ -48,7 +53,7 @@ class ApiTransactionService private (
extends TransactionService
with ErrorFactories {
private val logger = LoggerFactory.getLogger(this.getClass)
private val logger = loggerFactory.getLogger(this.getClass)
private val subscriptionIdCounter = new AtomicLong()

View File

@ -32,6 +32,7 @@ import com.digitalasset.ledger.api.domain.CompletionEvent.{
CommandRejected
}
import com.digitalasset.ledger.api.domain.{ParticipantId => _, _}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import com.digitalasset.platform.participant.util.EventFilter
import com.digitalasset.platform.sandbox.metrics.MetricsManager
@ -70,7 +71,8 @@ object SandboxIndexAndWriteService {
ledgerEntries: ImmArray[LedgerEntryOrBump],
startMode: SqlStartMode,
queueDepth: Int,
templateStore: InMemoryPackageStore)(
templateStore: InMemoryPackageStore,
loggerFactory: NamedLoggerFactory)(
implicit mat: Materializer,
mm: MetricsManager): Future[IndexAndWriteService] =
Ledger
@ -82,7 +84,8 @@ object SandboxIndexAndWriteService {
templateStore,
ledgerEntries,
queueDepth,
startMode
startMode,
loggerFactory
)
.map(ledger =>
createInstance(Ledger.metered(ledger), participantId, timeModel, timeProvider))(DEC)

View File

@ -19,6 +19,7 @@ import com.digitalasset.daml.lf.value.Value
import com.digitalasset.daml.lf.value.Value.AbsoluteContractId
import com.digitalasset.daml_lf.DamlLf.Archive
import com.digitalasset.ledger.api.domain.{LedgerId, PartyDetails}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.participant.util.EventFilter.TemplateAwareFilter
import com.digitalasset.platform.sandbox.metrics.MetricsManager
import com.digitalasset.platform.sandbox.stores.ActiveLedgerState.Contract
@ -125,7 +126,8 @@ object Ledger {
packages: InMemoryPackageStore,
ledgerEntries: ImmArray[LedgerEntryOrBump],
queueDepth: Int,
startMode: SqlStartMode
startMode: SqlStartMode,
loggerFactory: NamedLoggerFactory
)(implicit mat: Materializer, mm: MetricsManager): Future[Ledger] =
SqlLedger(
jdbcUrl,
@ -135,7 +137,8 @@ object Ledger {
packages,
ledgerEntries,
queueDepth,
startMode)
startMode,
loggerFactory)
/**
* Creates a JDBC backed read only ledger
@ -148,8 +151,9 @@ object Ledger {
def jdbcBackedReadOnly(
jdbcUrl: String,
ledgerId: LedgerId,
loggerFactory: NamedLoggerFactory
)(implicit mat: Materializer, mm: MetricsManager): Future[ReadOnlyLedger] =
ReadOnlySqlLedger(jdbcUrl, Some(ledgerId))
ReadOnlySqlLedger(jdbcUrl, Some(ledgerId), loggerFactory)
/** Wraps the given Ledger adding metrics around important calls */
def metered(ledger: Ledger)(implicit mm: MetricsManager): Ledger = MeteredLedger(ledger)

View File

@ -7,6 +7,7 @@ import akka.NotUsed
import akka.stream._
import akka.stream.scaladsl.{Keep, RestartSource, Sink, Source}
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import com.digitalasset.platform.sandbox.metrics.MetricsManager
import com.digitalasset.platform.sandbox.stores.ledger.ReadOnlyLedger
@ -23,7 +24,6 @@ import com.digitalasset.platform.sandbox.stores.ledger.sql.serialisation.{
ValueSerializer
}
import com.digitalasset.platform.sandbox.stores.ledger.sql.util.DbDispatcher
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import scala.concurrent.duration._
@ -35,14 +35,14 @@ object ReadOnlySqlLedger {
val noOfStreamingConnections = 2
//jdbcUrl must have the user/password encoded in form of: "jdbc:postgresql://localhost/test?user=fred&password=secret"
def apply(jdbcUrl: String, ledgerId: Option[LedgerId])(
def apply(jdbcUrl: String, ledgerId: Option[LedgerId], loggerFactory: NamedLoggerFactory)(
implicit mat: Materializer,
mm: MetricsManager): Future[ReadOnlyLedger] = {
implicit val ec: ExecutionContext = DEC
val dbType = DbType.jdbcType(jdbcUrl)
val dbDispatcher =
DbDispatcher(jdbcUrl, noOfShortLivedConnections, noOfStreamingConnections)
DbDispatcher(jdbcUrl, noOfShortLivedConnections, noOfStreamingConnections, loggerFactory)
val ledgerReadDao = LedgerDao.meteredRead(
JdbcLedgerDao(
dbDispatcher,
@ -50,9 +50,10 @@ object ReadOnlySqlLedger {
TransactionSerializer,
ValueSerializer,
KeyHasher,
dbType))
dbType,
loggerFactory))
ReadOnlySqlLedgerFactory(ledgerReadDao).createReadOnlySqlLedger(ledgerId)
ReadOnlySqlLedgerFactory(ledgerReadDao, loggerFactory).createReadOnlySqlLedger(ledgerId)
}
}
@ -84,9 +85,11 @@ private class ReadOnlySqlLedger(
}
}
private class ReadOnlySqlLedgerFactory(ledgerDao: LedgerReadDao) {
private class ReadOnlySqlLedgerFactory(
ledgerDao: LedgerReadDao,
loggerFactory: NamedLoggerFactory) {
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
/** *
* Creates a DB backed Ledger implementation.
@ -149,6 +152,6 @@ private class ReadOnlySqlLedgerFactory(ledgerDao: LedgerReadDao) {
}
private object ReadOnlySqlLedgerFactory {
def apply(ledgerDao: LedgerReadDao): ReadOnlySqlLedgerFactory =
new ReadOnlySqlLedgerFactory(ledgerDao)
def apply(ledgerDao: LedgerReadDao, loggerFactory: NamedLoggerFactory): ReadOnlySqlLedgerFactory =
new ReadOnlySqlLedgerFactory(ledgerDao, loggerFactory)
}

View File

@ -19,6 +19,7 @@ import com.digitalasset.daml.lf.engine.Blinding
import com.digitalasset.daml.lf.value.Value.{AbsoluteContractId, ContractId}
import com.digitalasset.daml_lf.DamlLf.Archive
import com.digitalasset.ledger.api.domain.{LedgerId, PartyDetails, RejectionReason}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import com.digitalasset.platform.sandbox.LedgerIdGenerator
import com.digitalasset.platform.sandbox.metrics.MetricsManager
@ -39,7 +40,6 @@ import com.digitalasset.platform.sandbox.stores.ledger.sql.serialisation.{
import com.digitalasset.platform.sandbox.stores.ledger.sql.util.DbDispatcher
import com.digitalasset.platform.sandbox.stores.ledger.{Ledger, LedgerEntry}
import com.digitalasset.platform.sandbox.stores.{InMemoryActiveLedgerState, InMemoryPackageStore}
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import scala.collection.immutable
@ -73,18 +73,23 @@ object SqlLedger {
packages: InMemoryPackageStore,
initialLedgerEntries: ImmArray[LedgerEntryOrBump],
queueDepth: Int,
startMode: SqlStartMode = SqlStartMode.ContinueIfExists)(
startMode: SqlStartMode = SqlStartMode.ContinueIfExists,
loggerFactory: NamedLoggerFactory)(
implicit mat: Materializer,
mm: MetricsManager): Future[Ledger] = {
implicit val ec: ExecutionContext = DEC
new FlywayMigrations(jdbcUrl).migrate()
new FlywayMigrations(jdbcUrl, loggerFactory).migrate()
val dbType = DbType.jdbcType(jdbcUrl)
val noOfShortLivedConnections =
if (dbType.supportsParallelWrites) defaultNumberOfShortLivedConnections else 1
val dbDispatcher =
DbDispatcher(jdbcUrl, noOfShortLivedConnections, defaultNumberOfStreamingConnections)
DbDispatcher(
jdbcUrl,
noOfShortLivedConnections,
defaultNumberOfStreamingConnections,
loggerFactory)
val ledgerDao = LedgerDao.metered(
JdbcLedgerDao(
@ -93,9 +98,10 @@ object SqlLedger {
TransactionSerializer,
ValueSerializer,
KeyHasher,
dbType))
dbType,
loggerFactory))
val sqlLedgerFactory = SqlLedgerFactory(ledgerDao)
val sqlLedgerFactory = SqlLedgerFactory(ledgerDao, loggerFactory)
sqlLedgerFactory.createSqlLedger(
ledgerId,
@ -119,11 +125,12 @@ private class SqlLedger(
timeProvider: TimeProvider,
packages: InMemoryPackageStore,
queueDepth: Int,
maxBatchSize: Int)(implicit mat: Materializer)
maxBatchSize: Int,
loggerFactory: NamedLoggerFactory)(implicit mat: Materializer)
extends BaseLedger(ledgerId, headAtInitialization, ledgerDao)
with Ledger {
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
// the reason for modelling persistence as a reactive pipeline is to avoid having race-conditions between the
// moving ledger-end, the async persistence operation and the dispatcher head notification
@ -323,9 +330,9 @@ private class SqlLedger(
}
}
private class SqlLedgerFactory(ledgerDao: LedgerDao) {
private class SqlLedgerFactory(ledgerDao: LedgerDao, loggerFactory: NamedLoggerFactory) {
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
/** *
* Creates a DB backed Ledger implementation.
@ -376,7 +383,8 @@ private class SqlLedgerFactory(ledgerDao: LedgerDao) {
timeProvider,
packages,
queueDepth,
maxBatchSize)
maxBatchSize,
loggerFactory)
}
private def reset(): Future[Unit] =
@ -491,5 +499,6 @@ private class SqlLedgerFactory(ledgerDao: LedgerDao) {
}
private object SqlLedgerFactory {
def apply(ledgerDao: LedgerDao): SqlLedgerFactory = new SqlLedgerFactory(ledgerDao)
def apply(ledgerDao: LedgerDao, loggerFactory: NamedLoggerFactory): SqlLedgerFactory =
new SqlLedgerFactory(ledgerDao, loggerFactory)
}

View File

@ -5,8 +5,8 @@ package com.digitalasset.platform.sandbox.stores.ledger.sql.dao
import java.sql.Connection
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import org.slf4j.LoggerFactory
import scala.concurrent.duration.{FiniteDuration, _}
import scala.util.control.NonFatal
@ -52,10 +52,11 @@ object HikariConnection {
class HikariJdbcConnectionProvider(
jdbcUrl: String,
noOfShortLivedConnections: Int,
noOfStreamingConnections: Int)
noOfStreamingConnections: Int,
loggerFactory: NamedLoggerFactory)
extends JdbcConnectionProvider {
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
// these connections should never timeout as we have exactly the same number of threads using them as many connections we have
private val shortLivedDataSource =
HikariConnection.createDataSource(
@ -106,6 +107,11 @@ object HikariJdbcConnectionProvider {
def apply(
jdbcUrl: String,
noOfShortLivedConnections: Int,
noOfStreamingConnections: Int): JdbcConnectionProvider =
new HikariJdbcConnectionProvider(jdbcUrl, noOfShortLivedConnections, noOfStreamingConnections)
noOfStreamingConnections: Int,
loggerFactory: NamedLoggerFactory): JdbcConnectionProvider =
new HikariJdbcConnectionProvider(
jdbcUrl,
noOfShortLivedConnections,
noOfStreamingConnections,
loggerFactory)
}

View File

@ -25,6 +25,7 @@ import com.digitalasset.daml_lf.DamlLf.Archive
import com.digitalasset.ledger._
import com.digitalasset.ledger.api.domain.RejectionReason._
import com.digitalasset.ledger.api.domain.{LedgerId, PartyDetails, RejectionReason}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.DirectExecutionContext
import com.digitalasset.platform.participant.util.EventFilter.TemplateAwareFilter
import com.digitalasset.platform.sandbox.stores.ActiveLedgerState.{
@ -48,7 +49,6 @@ import com.digitalasset.platform.sandbox.stores.ledger.sql.serialisation.{
import com.digitalasset.platform.sandbox.stores.ledger.sql.util.Conversions._
import com.digitalasset.platform.sandbox.stores.ledger.sql.util.DbDispatcher
import com.google.common.io.ByteStreams
import org.slf4j.LoggerFactory
import scalaz.syntax.tag._
import scala.collection.immutable
@ -62,14 +62,15 @@ private class JdbcLedgerDao(
transactionSerializer: TransactionSerializer,
valueSerializer: ValueSerializer,
keyHasher: KeyHasher,
dbType: DbType)
dbType: DbType,
loggerFactory: NamedLoggerFactory)
extends LedgerDao {
private val queries = dbType match {
case DbType.Postgres => PostgresQueries
case DbType.H2Database => H2DatabaseQueries
}
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
private val SQL_SELECT_LEDGER_ID = SQL("select ledger_id from parameters")
@ -1247,14 +1248,16 @@ object JdbcLedgerDao {
transactionSerializer: TransactionSerializer,
valueSerializer: ValueSerializer,
keyHasher: KeyHasher,
dbType: DbType): LedgerDao =
dbType: DbType,
loggerFactory: NamedLoggerFactory): LedgerDao =
new JdbcLedgerDao(
dbDispatcher,
contractSerializer,
transactionSerializer,
valueSerializer,
keyHasher,
dbType)
dbType,
loggerFactory)
sealed trait Queries {

View File

@ -3,19 +3,19 @@
package com.digitalasset.platform.sandbox.stores.ledger.sql.migration
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.sandbox.stores.ledger.sql.dao.{DbType, HikariConnection}
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.configuration.FluentConfiguration
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.util.Try
import scala.util.control.NonFatal
class FlywayMigrations(jdbcUrl: String) {
class FlywayMigrations(jdbcUrl: String, loggerFactory: NamedLoggerFactory) {
import FlywayMigrations._
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
private val dbType = DbType.jdbcType(jdbcUrl)
private def newDataSource =
@ -63,6 +63,6 @@ object FlywayMigrations {
def configurationBase(dbType: DbType): FluentConfiguration =
Flyway.configure.locations("classpath:db/migration/" + dbType.name)
def apply(jdbcUrl: String): FlywayMigrations =
new FlywayMigrations(jdbcUrl)
def apply(jdbcUrl: String, loggerFactory: NamedLoggerFactory): FlywayMigrations =
new FlywayMigrations(jdbcUrl, loggerFactory)
}

View File

@ -8,10 +8,10 @@ import java.util.concurrent.Executors
import akka.stream.scaladsl.Source
import akka.{Done, NotUsed}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.DirectExecutionContext
import com.digitalasset.platform.sandbox.stores.ledger.sql.dao.HikariJdbcConnectionProvider
import com.google.common.util.concurrent.ThreadFactoryBuilder
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
@ -43,13 +43,18 @@ trait DbDispatcher extends AutoCloseable {
private class DbDispatcherImpl(
jdbcUrl: String,
val noOfShortLivedConnections: Int,
noOfStreamingConnections: Int)
noOfStreamingConnections: Int,
loggerFactory: NamedLoggerFactory)
extends DbDispatcher {
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
private val connectionProvider =
HikariJdbcConnectionProvider(jdbcUrl, noOfShortLivedConnections, noOfStreamingConnections)
private val sqlExecutor = SqlExecutor(noOfShortLivedConnections)
HikariJdbcConnectionProvider(
jdbcUrl,
noOfShortLivedConnections,
noOfStreamingConnections,
loggerFactory)
private val sqlExecutor = SqlExecutor(noOfShortLivedConnections, loggerFactory)
private val connectionGettingThreadPool = ExecutionContext.fromExecutorService(
Executors.newSingleThreadExecutor(
@ -97,6 +102,11 @@ object DbDispatcher {
def apply(
jdbcUrl: String,
noOfShortLivedConnections: Int,
noOfStreamingConnections: Int): DbDispatcher =
new DbDispatcherImpl(jdbcUrl, noOfShortLivedConnections, noOfStreamingConnections)
noOfStreamingConnections: Int,
loggerFactory: NamedLoggerFactory): DbDispatcher =
new DbDispatcherImpl(
jdbcUrl,
noOfShortLivedConnections,
noOfStreamingConnections,
loggerFactory)
}

View File

@ -5,16 +5,16 @@ package com.digitalasset.platform.sandbox.stores.ledger.sql.util
import java.util.concurrent.{Executors, TimeUnit}
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.google.common.util.concurrent.ThreadFactoryBuilder
import org.slf4j.LoggerFactory
import scala.concurrent.{Future, Promise}
import scala.util.control.NonFatal
/** A dedicated executor for blocking sql queries. */
class SqlExecutor(noOfThread: Int) extends AutoCloseable {
class SqlExecutor(noOfThread: Int, loggerFactory: NamedLoggerFactory) extends AutoCloseable {
private val logger = LoggerFactory.getLogger(getClass)
private val logger = loggerFactory.getLogger(getClass)
private lazy val executor =
Executors.newFixedThreadPool(
@ -60,5 +60,6 @@ class SqlExecutor(noOfThread: Int) extends AutoCloseable {
}
object SqlExecutor {
def apply(noOfThread: Int): SqlExecutor = new SqlExecutor(noOfThread)
def apply(noOfThread: Int, loggerFactory: NamedLoggerFactory): SqlExecutor =
new SqlExecutor(noOfThread, loggerFactory)
}

View File

@ -13,7 +13,9 @@ import com.digitalasset.platform.sandbox.stores.ledger.sql.SqlStartMode
import com.digitalasset.platform.sandbox.stores.ledger.Ledger
import com.digitalasset.daml.lf.data.ImmArray
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.sandbox.stores.ledger.ScenarioLoader.LedgerEntryOrBump
import scalaz.Tag
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
@ -72,7 +74,9 @@ object LedgerResource {
packages,
ImmArray.empty,
128,
SqlStartMode.AlwaysReset))
SqlStartMode.AlwaysReset,
NamedLoggerFactory(Tag.unwrap(ledgerId))
))
ledger.setup()
}

View File

@ -13,6 +13,7 @@ import com.digitalasset.daml.lf.archive.DarReader
import com.digitalasset.daml.lf.data.{ImmArray, Ref}
import com.digitalasset.daml.lf.engine.Engine
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.sandbox.metrics.MetricsManager
import com.digitalasset.platform.sandbox.services.ApiSubmissionService
import com.digitalasset.platform.sandbox.stores.ledger.CommandExecutorImpl
@ -71,7 +72,8 @@ trait TestHelpers {
indexAndWriteService.writeService,
TimeModel.reasonableDefault,
timeProvider,
new CommandExecutorImpl(Engine(), packageStore.getLfPackage)
new CommandExecutorImpl(Engine(), packageStore.getLfPackage),
NamedLoggerFactory.forParticipant(participantId)
)(ec, mat)
}

View File

@ -7,6 +7,7 @@ import java.util.concurrent.ConcurrentLinkedQueue
import akka.actor.ActorSystem
import akka.pattern.after
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.common.util.{DirectExecutionContext => DEC}
import org.scalatest.{AsyncWordSpec, Matchers}
@ -86,11 +87,13 @@ class RecoveringIndexerIT extends AsyncWordSpec with Matchers {
private[this] implicit val ec: ExecutionContext = DEC
private[this] val actorSystem = ActorSystem("RecoveringIndexerIT")
private[this] val scheduler = actorSystem.scheduler
private[this] val loggerFactory = NamedLoggerFactory(RecoveringIndexerIT.super.getClass)
"RecoveringIndexer" should {
"work when the stream completes" in {
val recoveringIndexer = new RecoveringIndexer(actorSystem.scheduler, 10.millis, 1.second)
val recoveringIndexer =
new RecoveringIndexer(actorSystem.scheduler, 10.millis, 1.second, loggerFactory)
val testIndexer = new TestIndexer(
List(
SubscribeResult("A", 10.millis, true, 10.millis, true)
@ -108,7 +111,8 @@ class RecoveringIndexerIT extends AsyncWordSpec with Matchers {
}
"work when the stream is stopped" in {
val recoveringIndexer = new RecoveringIndexer(actorSystem.scheduler, 10.millis, 1.second)
val recoveringIndexer =
new RecoveringIndexer(actorSystem.scheduler, 10.millis, 1.second, loggerFactory)
// Stream completes after 10sec, but stop() is called before
val testIndexer = new TestIndexer(
List(
@ -129,7 +133,8 @@ class RecoveringIndexerIT extends AsyncWordSpec with Matchers {
}
"recover failures" in {
val recoveringIndexer = new RecoveringIndexer(actorSystem.scheduler, 10.millis, 1.second)
val recoveringIndexer =
new RecoveringIndexer(actorSystem.scheduler, 10.millis, 1.second, loggerFactory)
// Subscribe fails, then the stream fails, then the stream completes without errors.
val testIndexer = new TestIndexer(
List(
@ -156,7 +161,8 @@ class RecoveringIndexerIT extends AsyncWordSpec with Matchers {
}
"respect restart delay" in {
val recoveringIndexer = new RecoveringIndexer(actorSystem.scheduler, 500.millis, 1.second)
val recoveringIndexer =
new RecoveringIndexer(actorSystem.scheduler, 500.millis, 1.second, loggerFactory)
// Subscribe fails, then the stream completes without errors. Note the restart delay of 500ms.
val testIndexer = new TestIndexer(
List(

View File

@ -3,14 +3,17 @@
package com.digitalasset.platform.sandbox.persistence
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.sandbox.stores.ledger.sql.dao.HikariJdbcConnectionProvider
import com.digitalasset.platform.sandbox.stores.ledger.sql.migration.FlywayMigrations
import org.scalatest._
class PostgresIT extends WordSpec with Matchers with PostgresAroundAll {
private val loggerFactory = NamedLoggerFactory("PostgresIT")
private lazy val connectionProvider =
HikariJdbcConnectionProvider(postgresFixture.jdbcUrl, 4, 4)
HikariJdbcConnectionProvider(postgresFixture.jdbcUrl, 4, 4, loggerFactory)
"Postgres" when {
@ -32,7 +35,7 @@ class PostgresIT extends WordSpec with Matchers with PostgresAroundAll {
"Flyway" should {
"execute initialisation script" in {
FlywayMigrations(postgresFixture.jdbcUrl).migrate()
FlywayMigrations(postgresFixture.jdbcUrl, loggerFactory).migrate()
connectionProvider.runSQL { conn =>
def checkTableExists(table: String) = {
val resultSet = conn.createStatement().executeQuery(s"SELECT * from $table")

View File

@ -5,6 +5,7 @@ package com.digitalasset.platform.sandbox.services.admin
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import org.scalatest.{AsyncFlatSpec, Matchers}
import scala.concurrent.Future
@ -15,6 +16,7 @@ final class PollingUtilsSpec extends AsyncFlatSpec with Matchers {
private[this] val actorSystem = ActorSystem("PollingUtilsSpec")
private[this] val materializer = ActorMaterializer()(actorSystem)
private[this] val scheduler = materializer.system.scheduler
private[this] val loggerFactory = NamedLoggerFactory(this.getClass)
behavior of "pollUntilPersisted"
@ -25,7 +27,8 @@ final class PollingUtilsSpec extends AsyncFlatSpec with Matchers {
0.millis,
0.millis,
identity,
scheduler) map { attempts =>
scheduler,
loggerFactory) map { attempts =>
assert(attempts == 1)
}
}
@ -38,7 +41,8 @@ final class PollingUtilsSpec extends AsyncFlatSpec with Matchers {
0.millis,
0.millis,
identity,
scheduler) map { attempts =>
scheduler,
loggerFactory) map { attempts =>
assert(attempts == 4)
}
}
@ -51,7 +55,8 @@ final class PollingUtilsSpec extends AsyncFlatSpec with Matchers {
0.millis,
0.millis,
identity,
scheduler) map { attempts =>
scheduler,
loggerFactory) map { attempts =>
assert(attempts == 4)
}
}
@ -65,7 +70,8 @@ final class PollingUtilsSpec extends AsyncFlatSpec with Matchers {
100.millis,
100.millis,
identity,
scheduler) map { attempts =>
scheduler,
loggerFactory) map { attempts =>
assert(attempts < 11)
}
}
@ -81,7 +87,8 @@ final class PollingUtilsSpec extends AsyncFlatSpec with Matchers {
100.millis,
1000.millis,
d => d * 2,
scheduler) map { attempts =>
scheduler,
loggerFactory) map { attempts =>
assert(attempts < 6)
}
}

View File

@ -39,6 +39,7 @@ import com.digitalasset.ledger.api.domain.{
TransactionFilter
}
import com.digitalasset.ledger.api.testing.utils.AkkaBeforeAndAfterAll
import com.digitalasset.platform.common.logging.NamedLoggerFactory
import com.digitalasset.platform.participant.util.EventFilter
import com.digitalasset.platform.sandbox.persistence.PostgresAroundAll
import com.digitalasset.platform.sandbox.stores.ActiveLedgerState.ActiveContract
@ -80,15 +81,17 @@ class JdbcLedgerDaoSpec
override def beforeAll(): Unit = {
super.beforeAll()
FlywayMigrations(postgresFixture.jdbcUrl).migrate()
dbDispatcher = DbDispatcher(postgresFixture.jdbcUrl, 4, 4)
val loggerFactory = NamedLoggerFactory(JdbcLedgerDaoSpec.getClass)
FlywayMigrations(postgresFixture.jdbcUrl, loggerFactory).migrate()
dbDispatcher = DbDispatcher(postgresFixture.jdbcUrl, 4, 4, loggerFactory)
ledgerDao = JdbcLedgerDao(
dbDispatcher,
ContractSerializer,
TransactionSerializer,
ValueSerializer,
KeyHasher,
DbType.Postgres)
DbType.Postgres,
loggerFactory)
Await.result(ledgerDao.initializeLedger(LedgerId("test-ledger"), 0), 10.seconds)
}

View File

@ -15,6 +15,7 @@ import org.scalatest.{AsyncWordSpec, Matchers}
import scala.concurrent.duration._
import com.digitalasset.ledger.api.domain.LedgerId
import com.digitalasset.platform.common.logging.NamedLoggerFactory
class SqlLedgerSpec
extends AsyncWordSpec
@ -31,6 +32,8 @@ class SqlLedgerSpec
private val ledgerId: LedgerId = LedgerId(Ref.LedgerString.assertFromString("TheLedger"))
private val loggerFactory = NamedLoggerFactory(this.getClass)
"SQL Ledger" should {
"be able to be created from scratch with a random ledger id" in {
val ledgerF = SqlLedger(
@ -40,7 +43,9 @@ class SqlLedgerSpec
acs = InMemoryActiveLedgerState.empty,
packages = InMemoryPackageStore.empty,
initialLedgerEntries = ImmArray.empty,
queueDepth
queueDepth,
startMode = SqlStartMode.ContinueIfExists,
loggerFactory
)
ledgerF.map { ledger =>
@ -56,7 +61,9 @@ class SqlLedgerSpec
acs = InMemoryActiveLedgerState.empty,
packages = InMemoryPackageStore.empty,
initialLedgerEntries = ImmArray.empty,
queueDepth
queueDepth,
startMode = SqlStartMode.ContinueIfExists,
loggerFactory
)
ledgerF.map { ledger =>
@ -74,7 +81,9 @@ class SqlLedgerSpec
acs = InMemoryActiveLedgerState.empty,
packages = InMemoryPackageStore.empty,
initialLedgerEntries = ImmArray.empty,
queueDepth
queueDepth,
startMode = SqlStartMode.ContinueIfExists,
loggerFactory
)
ledger2 <- SqlLedger(
@ -84,7 +93,9 @@ class SqlLedgerSpec
acs = InMemoryActiveLedgerState.empty,
packages = InMemoryPackageStore.empty,
initialLedgerEntries = ImmArray.empty,
queueDepth
queueDepth,
startMode = SqlStartMode.ContinueIfExists,
loggerFactory
)
ledger3 <- SqlLedger(
@ -94,7 +105,9 @@ class SqlLedgerSpec
acs = InMemoryActiveLedgerState.empty,
packages = InMemoryPackageStore.empty,
initialLedgerEntries = ImmArray.empty,
queueDepth
queueDepth,
startMode = SqlStartMode.ContinueIfExists,
loggerFactory
)
} yield {
@ -114,7 +127,9 @@ class SqlLedgerSpec
acs = InMemoryActiveLedgerState.empty,
packages = InMemoryPackageStore.empty,
initialLedgerEntries = ImmArray.empty,
queueDepth
queueDepth,
startMode = SqlStartMode.ContinueIfExists,
loggerFactory
)
_ <- SqlLedger(
jdbcUrl = postgresFixture.jdbcUrl,
@ -123,7 +138,9 @@ class SqlLedgerSpec
acs = InMemoryActiveLedgerState.empty,
packages = InMemoryPackageStore.empty,
initialLedgerEntries = ImmArray.empty,
queueDepth
queueDepth,
startMode = SqlStartMode.ContinueIfExists,
loggerFactory
)
} yield (())

View File

@ -36,3 +36,4 @@ HEAD — ongoing
- [DAML Compiler] Generic template instantiations like ``template instance IouProposal = Proposal Iou`` now generate a type synonym ``type IouProposal = Proposal Iou`` that can be used in DAML. Before, they generated a ``newtype``, which cannot be used anymore.
- [DAML Compiler] Fixed a bug where ``damlc build`` sometimes did not find modules during typechecking
even if they were present during parallel compilations.
- [Ledger] Enhance logging to correlate log messages with the associated participant id in multi-participant node tests and environments