CI: Run PostgreSQL once for all Scala tests. (#5919)

Samir Talwar 2020-05-14 09:06:34 +02:00 committed by GitHub
parent 9f43ab95ac
commit 57a8d0b37e
19 changed files with 215 additions and 125 deletions


@@ -828,6 +828,10 @@ dev_env_tool(
         "bin/pg_ctl",
         "bin/postgres",
     ],
+    required_tools = {
+        "initdb": ["postgres"],
+        "pg_ctl": ["postgres"],
+    },
     tools = [
         "createdb",
         "dropdb",


@@ -4,31 +4,42 @@
 load("@bazel_tools//tools/cpp:lib_cc_configure.bzl", "get_cpu_value")
 load("@rules_sh//sh:posix.bzl", "posix")
-def _create_build_content(rule_name, tools, win_paths, nix_paths):
+def _create_build_content(rule_name, is_windows, tools, required_tools, win_paths, nix_paths):
     content = """
 # DO NOT EDIT: automatically generated BUILD file for dev_env_tool.bzl: {rule_name}
 package(default_visibility = ["//visibility:public"])
 filegroup(
     name = "all",
     srcs = glob(["**"]),
 )
 """.format(rule_name = rule_name)
     for i in range(0, len(tools)):
-        content += """
+        if is_windows:
+            content += """
+# Running tools with `bazel run` is not supported on Windows.
 filegroup(
     name = "{tool}",
-    srcs = select({{
-        ":windows": ["{win_path}"],
-        "//conditions:default": ["{nix_path}"],
-    }}),
+    srcs = ["{path}"],
 )
 """.format(
                 tool = tools[i],
-                win_path = win_paths[i],
-                nix_path = nix_paths[i],
-            )
+                path = win_paths[i],
+            )
+        else:
+            content += """
+sh_binary(
+    name = "{tool}",
+    srcs = ["{path}"],
+    data = {dependencies},
+)
+""".format(
+                tool = tools[i],
+                dependencies = [":{}".format(dep) for dep in required_tools.get(tools[i], [])],
+                path = nix_paths[i],
+            )
     content += """
 config_setting(
@@ -95,7 +106,8 @@ dadew = repository_rule(
 )
 def _dev_env_tool_impl(ctx):
-    if get_cpu_value(ctx) == "x64_windows":
+    is_windows = get_cpu_value(ctx) == "x64_windows"
+    if is_windows:
         ps = ctx.which("powershell")
         dadew = _dadew_where(ctx, ps)
         find = _dadew_tool_home(dadew, "msys2") + "\\usr\\bin\\find.exe"
@@ -119,7 +131,9 @@ def _dev_env_tool_impl(ctx):
     build_path = ctx.path("BUILD")
     build_content = _create_build_content(
         rule_name = ctx.name,
+        is_windows = is_windows,
         tools = ctx.attr.tools,
+        required_tools = ctx.attr.required_tools,
         win_paths = [
             "%s/%s" % (ctx.attr.prefix, path)
             for path in ctx.attr.win_paths
@@ -137,6 +151,10 @@ dev_env_tool = repository_rule(
         "tools": attr.string_list(
             mandatory = True,
         ),
+        "required_tools": attr.string_list_dict(
+            mandatory = False,
+            default = {},
+        ),
         "win_tool": attr.string(
             mandatory = True,
         ),


@@ -2,10 +2,9 @@
 # Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
 # SPDX-License-Identifier: Apache-2.0
 set -euo pipefail
-eval "$($(dirname "$0")/dev-env/bin/dade-assist)"
+eval "$("$(dirname "$0")/dev-env/bin/dade-assist")"
 execution_log_postfix=${1:-}
@@ -18,23 +17,59 @@ if [[ "$execution_log_postfix" == "_Darwin" ]]; then
   tag_filter="-dont-run-on-darwin,-scaladoc,-pdfdocs"
 fi
-# Bazel test only builds targets that are dependencies of a test suite
-# so do a full build first.
+# Bazel test only builds targets that are dependencies of a test suite so do a full build first.
 bazel build //... --build_tag_filters "$tag_filter"
-bazel test //... --build_tag_filters "$tag_filter" --test_tag_filters "$tag_filter" --experimental_execution_log_file "$ARTIFACT_DIRS/test_execution${execution_log_postfix}.log"
-# Make sure that Bazel query works.
-bazel query 'deps(//...)' > /dev/null
-# Check that we can load damlc in ghci
-GHCI_SCRIPT=$(mktemp)
-function cleanup {
-  rm -rf "$GHCI_SCRIPT"
+# Set up a shared PostgreSQL instance.
+export POSTGRESQL_ROOT_DIR="${TMPDIR:-/tmp}/daml/postgresql"
+export POSTGRESQL_DATA_DIR="${POSTGRESQL_ROOT_DIR}/data"
+export POSTGRESQL_LOG_FILE="${POSTGRESQL_ROOT_DIR}/postgresql.log"
+export POSTGRESQL_HOST='localhost'
+export POSTGRESQL_PORT=54321
+export POSTGRESQL_USERNAME='test'
+export POSTGRESQL_PASSWORD=''
+function start_postgresql() {
+  mkdir -p "$POSTGRESQL_DATA_DIR"
+  bazel run -- @postgresql_dev_env//:initdb --auth=trust --encoding=UNICODE --locale=en_US.UTF-8 --username="$POSTGRESQL_USERNAME" "$POSTGRESQL_DATA_DIR"
+  envsubst -no-unset -i ci/postgresql.conf -o "$POSTGRESQL_DATA_DIR/postgresql.conf"
+  bazel run -- @postgresql_dev_env//:pg_ctl -w --pgdata="$POSTGRESQL_DATA_DIR" --log="$POSTGRESQL_LOG_FILE" start || {
+    if [[ -f "$POSTGRESQL_LOG_FILE" ]]; then
+      echo >&2 'PostgreSQL logs:'
+      cat >&2 "$POSTGRESQL_LOG_FILE"
+    fi
+    return 1
+  }
 }
-trap cleanup EXIT
+function stop_postgresql() {
+  if [[ -e "$POSTGRESQL_DATA_DIR" ]]; then
+    bazel run -- @postgresql_dev_env//:pg_ctl -w --pgdata="$POSTGRESQL_DATA_DIR" --mode=immediate stop || :
+    rm -rf "$POSTGRESQL_ROOT_DIR"
+  fi
+}
+trap stop_postgresql EXIT
+stop_postgresql # in case it's running from a previous build
+start_postgresql
+# Run the tests.
+bazel test //... \
+  --build_tag_filters "$tag_filter" \
+  --test_tag_filters "$tag_filter" \
+  --test_env "POSTGRESQL_HOST=${POSTGRESQL_HOST}" \
+  --test_env "POSTGRESQL_PORT=${POSTGRESQL_PORT}" \
+  --test_env "POSTGRESQL_USERNAME=${POSTGRESQL_USERNAME}" \
+  --test_env "POSTGRESQL_PASSWORD=${POSTGRESQL_PASSWORD}" \
+  --experimental_execution_log_file "$ARTIFACT_DIRS/test_execution${execution_log_postfix}.log"
+# Make sure that Bazel query works.
+bazel query 'deps(//...)' >/dev/null
+# Check that we can load damlc in ghci
 # Disabled on darwin since it sometimes seem to hang and this only
 # tests our dev setup rather than our code so issues are not critical.
 if [[ "$(uname)" != "Darwin" ]]; then
   da-ghci --data yes //compiler/damlc:damlc -e ':main --help'
 fi
 # Check that our IDE works on our codebase
 ghcide compiler/damlc/exe/Main.hs 2>&1 | tee ide-log
 grep -q "1 file worked, 0 files failed" ide-log

ci/postgresql.conf (new file, 8 lines)

@@ -0,0 +1,8 @@
+listen_addresses = '${POSTGRESQL_HOST}'
+port = ${POSTGRESQL_PORT}
+unix_socket_directories = '${POSTGRESQL_ROOT_DIR}'
+fsync = off
+synchronous_commit = off
+full_page_writes = off
+log_min_duration_statement = 0
+log_connections = on
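For orientation only (not part of the diff): with the defaults exported by the CI build script above, and assuming TMPDIR is unset, envsubst would render this template roughly as:

listen_addresses = 'localhost'
port = 54321
unix_socket_directories = '/tmp/daml/postgresql'
fsync = off
synchronous_commit = off
full_page_writes = off
log_min_duration_statement = 0
log_connections = on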


@@ -27,8 +27,10 @@ genrule(
         set -euo pipefail
         TMP_DIR=$$(mktemp -d)
         MVN_DB="$$TMP_DIR/m2"
+        MVN=($(locations @mvn_dev_env//:mvn))
+        MVN="$${{MVN[0]}}"
         install_mvn() {{
-          $(location @mvn_dev_env//:mvn) -q install:install-file \
+          "$$MVN" -q install:install-file \
            -Dmaven.repo.local=$$MVN_DB \
            "-DgroupId=$$1" \
            "-DartifactId=$$2" \
@@ -62,7 +64,7 @@ genrule(
           "com.daml" "ledger-api-auth-client" \
             $(location //ledger/ledger-api-auth-client:libledger-api-auth-client.jar) \
             $(location //ledger/ledger-api-auth-client:ledger-api-auth-client_pom.xml)
-        $(location @mvn_dev_env//:mvn) -q -Dmaven.repo.local=$$MVN_DB -f "$$TMP_DIR/quickstart-java/pom.xml" dependency:resolve dependency:resolve-plugins
+        "$$MVN" -q -Dmaven.repo.local=$$MVN_DB -f "$$TMP_DIR/quickstart-java/pom.xml" dependency:resolve dependency:resolve-plugins
         tar cf $(location integration-tests-mvn.tar) -C $$(dirname $$MVN_DB) $$(basename $$MVN_DB) \
           --owner=0 --group=0 --numeric-owner --mtime=2000-01-01\ 00:00Z --sort=name
     """.format(mvn = mvn_version),

dev-env/bin/envsubst (new symbolic link)

@@ -0,0 +1 @@
+../lib/dade-exec-nix-bin-tool


@@ -50,8 +50,8 @@ trait ExtractorFixture extends SandboxFixture with PostgresAroundSuite with Type
   protected def target: PostgreSQLTarget = PostgreSQLTarget(
     connectUrl = postgresDatabase.url,
-    user = PostgresAround.userName,
-    password = PostgresAround.password,
+    user = postgresDatabase.userName,
+    password = postgresDatabase.password,
     outputFormat = outputFormat,
     schemaPerPackage = false,
     mergeIdentical = false,


@@ -14,9 +14,9 @@ object MainWithEphemeralPostgresql extends PostgresAround {
       .parse[Unit]("SQL Ledger", _ => (), (), args)
       .getOrElse(sys.exit(1))
-    startEphemeralPostgres()
+    connectToPostgresqlServer()
     val database = createNewRandomDatabase()
-    sys.addShutdownHook(stopAndCleanUpPostgres())
+    sys.addShutdownHook(disconnectFromPostgresqlServer())
     val config = originalConfig.copy(
       participants = originalConfig.participants.map(_.copy(serverJdbcUrl = database.url)),
       extra = ExtraConfig(jdbcUrl = Some(database.url)),


@@ -8,9 +8,9 @@ import com.daml.testing.postgresql.PostgresAround
 object MainWithEphemeralPostgresql extends PostgresAround {
   def main(args: Array[String]): Unit = {
-    startEphemeralPostgres()
+    connectToPostgresqlServer()
     val database = createNewRandomDatabase()
-    sys.addShutdownHook(stopAndCleanUpPostgres())
+    sys.addShutdownHook(disconnectFromPostgresqlServer())
     SandboxMain.main(args ++ Array("--sql-backend-jdbcurl", database.url))
   }
 }


@@ -8,9 +8,9 @@ import com.daml.testing.postgresql.PostgresAround
 object MainWithEphemeralPostgresql extends PostgresAround {
   def main(args: Array[String]): Unit = {
-    startEphemeralPostgres()
+    connectToPostgresqlServer()
     val database = createNewRandomDatabase()
-    sys.addShutdownHook(stopAndCleanUpPostgres())
+    sys.addShutdownHook(disconnectFromPostgresqlServer())
     Main.main(args ++ Array("--sql-backend-jdbcurl", database.url))
   }
 }


@@ -10,7 +10,7 @@ import com.daml.api.util.TimeProvider
 import com.daml.bazeltools.BazelRunfiles.rlocation
 import com.daml.daml_lf_dev.DamlLf
 import com.daml.ledger.api.domain.LedgerId
-import com.daml.ledger.api.health.{Healthy, Unhealthy}
+import com.daml.ledger.api.health.Healthy
 import com.daml.ledger.api.testing.utils.AkkaBeforeAndAfterAll
 import com.daml.ledger.participant.state.v1.ParticipantId
 import com.daml.lf.archive.DarReader
@@ -146,32 +146,6 @@ class SqlLedgerSpec
         ledger.currentHealth() should be(Healthy)
       }
     }
-
-    "be unhealthy if the underlying database is inaccessible 3 or more times in a row" in {
-      for {
-        ledger <- createSqlLedger()
-      } yield {
-        withClue("before shutting down postgres,") {
-          ledger.currentHealth() should be(Healthy)
-        }
-
-        stopPostgres()
-
-        eventually {
-          withClue("after shutting down postgres,") {
-            ledger.currentHealth() should be(Unhealthy)
-          }
-        }
-
-        startPostgres()
-
-        eventually {
-          withClue("after starting up postgres,") {
-            ledger.currentHealth() should be(Healthy)
-          }
-        }
-      }
-    }
   }

   private def createSqlLedger(): Future[Ledger] =


@@ -10,54 +10,86 @@ import java.nio.file.{Files, Path}
 import java.util.UUID
 import java.util.concurrent.atomic.AtomicBoolean
+import com.daml.ports.Port
 import com.daml.testing.postgresql.PostgresAround._
 import org.apache.commons.io.{FileUtils, IOUtils}
 import org.slf4j.LoggerFactory
 import scala.collection.JavaConverters.asScalaBufferConverter
+import scala.util.Try
 import scala.util.control.NonFatal
 trait PostgresAround {
-  @volatile
-  private var fixture: PostgresFixture = _
+  @volatile private var server: PostgresServer = _
+  @volatile private var paths: Option[PostgresServerPaths] = None
   private val started: AtomicBoolean = new AtomicBoolean(false)
-  protected def startEphemeralPostgres(): Unit = {
+  protected def connectToPostgresqlServer(): Unit = {
+    (
+      sys.env.get("POSTGRESQL_HOST"),
+      sys.env.get("POSTGRESQL_PORT").map(port => Port(port.toInt)),
+      sys.env.get("POSTGRESQL_USERNAME"),
+      sys.env.get("POSTGRESQL_PASSWORD"),
+    ) match {
+      case (Some(hostName), Some(port), Some(userName), Some(password)) =>
+        connectToSharedServer(hostName, port, userName, password)
+      case _ =>
+        startEphemeralServer()
+    }
+  }
+  private def connectToSharedServer(
+      hostName: String,
+      port: Port,
+      userName: String,
+      password: String,
+  ): Unit = {
+    logger.info(s"Connected to PostgreSQL on $hostName:$port.")
+    server = PostgresServer(hostName, port, userName, password)
+  }
+  private def startEphemeralServer(): Unit = {
     logger.info("Starting an ephemeral PostgreSQL instance...")
-    val tempDir = Files.createTempDirectory("postgres_test")
-    val dataDir = tempDir.resolve("data")
-    val confFile = dataDir.resolve("postgresql.conf")
-    val logFile = Files.createFile(tempDir.resolve("postgresql.log"))
+    val root = Files.createTempDirectory("postgres_test")
+    val dataDir = root.resolve("data")
+    val configPath = dataDir.resolve("postgresql.conf")
+    val logFile = Files.createFile(root.resolve("postgresql.log"))
     val lockedPort = FreePort.find()
+    val hostName = InetAddress.getLoopbackAddress.getHostAddress
     val port = lockedPort.port
-    fixture = PostgresFixture(port, tempDir, dataDir, confFile, logFile)
+    val userName = "test"
+    val password = ""
+    server = PostgresServer(hostName, port, userName, password)
+    paths = Some(PostgresServerPaths(root, dataDir, logFile))
     try {
-      initializeDatabase()
-      createConfigFile()
-      startPostgres()
+      initializeDatabase(dataDir, userName)
+      createConfigFile(configPath)
+      startPostgresql(dataDir, logFile)
       lockedPort.unlock()
       logger.info(s"PostgreSQL has started on port $port.")
     } catch {
       case NonFatal(e) =>
         lockedPort.unlock()
-        stopPostgres()
-        deleteRecursively(tempDir)
-        fixture = null
+        stopPostgresql(dataDir)
+        deleteRecursively(root)
         throw e
     }
   }
-  protected def stopAndCleanUpPostgres(): Unit = {
-    logger.info("Stopping and cleaning up PostgreSQL...")
-    stopPostgres()
-    deleteRecursively(fixture.tempDir)
-    logger.info("PostgreSQL has stopped, and the data directory has been deleted.")
-    fixture = null
+  protected def disconnectFromPostgresqlServer(): Unit = {
+    paths foreach {
+      case PostgresServerPaths(root, dataDir, _) =>
+        logger.info("Stopping and cleaning up PostgreSQL...")
+        stopPostgresql(dataDir)
+        deleteRecursively(root)
+        logger.info("PostgreSQL has stopped, and the data directory has been deleted.")
+    }
+    server = null
   }
-  protected def startPostgres(): Unit = {
+  private def startPostgresql(dataDir: Path, logFile: Path): Unit = {
     if (!started.compareAndSet(false, true)) {
       throw new IllegalStateException(
         "Attempted to start PostgreSQL, but it has already been started.",
@@ -69,9 +101,9 @@ trait PostgresAround {
       Tool.pg_ctl,
       "-w",
       "-D",
-      fixture.dataDir.toString,
+      dataDir.toString,
      "-l",
-      fixture.logFile.toString,
+      logFile.toString,
       "start",
     )
   } catch {
@@ -82,7 +114,7 @@ trait PostgresAround {
     }
   }
-  protected def stopPostgres(): Unit = {
+  private def stopPostgresql(dataDir: Path): Unit = {
     if (started.compareAndSet(true, false)) {
       logger.info("Stopping PostgreSQL...")
       run(
@@ -90,7 +122,7 @@ trait PostgresAround {
         Tool.pg_ctl,
         "-w",
         "-D",
-        fixture.dataDir.toString,
+        dataDir.toString,
         "-m",
         "immediate",
         "stop",
@@ -103,12 +135,12 @@ trait PostgresAround {
     createNewDatabase(UUID.randomUUID().toString)
   protected def createNewDatabase(name: String): PostgresDatabase = {
-    val database = PostgresDatabase(hostName, fixture.port, userName, name)
+    val database = PostgresDatabase(server, name)
     createDatabase(database)
     database
   }
-  private def initializeDatabase(): Unit = run(
+  private def initializeDatabase(dataDir: Path, userName: String): Unit = run(
     "initialize the PostgreSQL database",
     Tool.initdb,
     s"--username=$userName",
@@ -117,10 +149,10 @@ trait PostgresAround {
     "UNICODE",
     "-A",
     "trust",
-    fixture.dataDir.toString.replaceAllLiterally("\\", "/"),
+    dataDir.toString.replaceAllLiterally("\\", "/"),
   )
-  private def createConfigFile(): Unit = {
+  private def createConfigFile(configPath: Path): Unit = {
     // taken from here: https://bitbucket.org/eradman/ephemeralpg/src/1b5a3c6be81c69a860b7bd540a16b1249d3e50e2/pg_tmp.sh?at=default&fileviewer=file-view-default#pg_tmp.sh-54
     // We set unix_socket_directories to /tmp rather than tempDir
     // since the latter will refer to a temporary directory set by
@@ -129,16 +161,16 @@ trait PostgresAround {
     // this option is ignored.
     val configText =
       s"""|unix_socket_directories = '/tmp'
          |shared_buffers = 12MB
          |fsync = off
          |synchronous_commit = off
          |full_page_writes = off
          |log_min_duration_statement = 0
          |log_connections = on
-         |listen_addresses = '$hostName'
-         |port = ${fixture.port}
+         |listen_addresses = '${server.hostName}'
+         |port = ${server.port}
       """.stripMargin
-    Files.write(fixture.confFile, configText.getBytes(StandardCharsets.UTF_8))
+    Files.write(configPath, configText.getBytes(StandardCharsets.UTF_8))
     ()
   }
@@ -176,7 +208,7 @@ trait PostgresAround {
     IOUtils.copy(process.getInputStream, stdout, StandardCharsets.UTF_8)
     val stderr = new StringWriter
     IOUtils.copy(process.getErrorStream, stderr, StandardCharsets.UTF_8)
-    val logs = Files.readAllLines(fixture.logFile).asScala
+    val logs = readLogs()
     throw new ProcessFailedException(
       description = description,
       command = command,
@@ -189,7 +221,7 @@ trait PostgresAround {
     case e: ProcessFailedException =>
       throw e
     case NonFatal(e) =>
-      val logs = Files.readAllLines(fixture.logFile).asScala
+      val logs = readLogs()
       throw new ProcessFailedException(
         description = description,
         command = command,
@@ -199,6 +231,10 @@ trait PostgresAround {
     }
   }
+  private def readLogs(): Seq[String] =
+    Try(paths.map(paths => Files.readAllLines(paths.logFile).asScala).getOrElse(Seq.empty))
+      .getOrElse(Seq.empty)
   private def deleteRecursively(tempDir: Path): Unit =
     FileUtils.deleteDirectory(tempDir.toFile)
 }
@@ -206,11 +242,6 @@ trait PostgresAround {
 object PostgresAround {
   private val logger = LoggerFactory.getLogger(getClass)
-  private val hostName = InetAddress.getLoopbackAddress.getHostName
-  val userName = "test"
-  val password = ""
   private class ProcessFailedException(
     description: String,
     command: Seq[String],


@@ -11,13 +11,13 @@ trait PostgresAroundAll extends PostgresAroundSuite with BeforeAndAfterAll {
   override protected def beforeAll(): Unit = {
     // We start PostgreSQL before calling `super` because _generally_ the database needs to be up
     // before everything else.
-    startEphemeralPostgres()
+    connectToPostgresqlServer()
     createNewDatabase()
     super.beforeAll()
   }
   override protected def afterAll(): Unit = {
     super.afterAll()
-    stopAndCleanUpPostgres()
+    disconnectFromPostgresqlServer()
   }
 }
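For orientation only (not part of the diff): a suite mixing in PostgresAroundAll now reuses the shared server whenever the POSTGRESQL_* variables are exported, as the CI script does, and otherwise falls back to a private ephemeral instance. A minimal hypothetical usage sketch, assuming a ScalaTest 3.0-style WordSpec and that `postgresDatabase` is the accessor provided by PostgresAroundSuite (the suite name and assertion are illustrative):

package com.daml.example

import com.daml.testing.postgresql.PostgresAroundAll
import org.scalatest.{Matchers, WordSpec}

// Hypothetical suite: the database is created in beforeAll and torn down
// (or disconnected from the shared server) in afterAll by PostgresAroundAll.
final class ExampleJdbcSpec extends WordSpec with Matchers with PostgresAroundAll {
  "the PostgreSQL fixture" should {
    "expose a usable JDBC URL" in {
      postgresDatabase.url should startWith("jdbc:postgresql://")
    }
  }
}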


@@ -14,13 +14,13 @@ trait PostgresAroundEach
   override protected def beforeAll(): Unit = {
     // We start PostgreSQL before calling `super` because _generally_ the database needs to be up
     // before everything else.
-    startEphemeralPostgres()
+    connectToPostgresqlServer()
     super.beforeAll()
   }
   override protected def afterAll(): Unit = {
     super.afterAll()
-    stopAndCleanUpPostgres()
+    disconnectFromPostgresqlServer()
   }
   override protected def beforeEach(): Unit = {


@@ -5,13 +5,20 @@ package com.daml.testing.postgresql
 import com.daml.ports.Port
-final case class PostgresDatabase(
-    hostName: String,
-    port: Port,
-    userName: String,
+final case class PostgresDatabase private[postgresql] (
+    private val server: PostgresServer,
     databaseName: String,
 ) {
-  def url: String = s"jdbc:postgresql://$hostName:$port/$databaseName?user=$userName"
+  def hostName: String = server.hostName
+  def port: Port = server.port
+  def userName: String = server.userName
+  def password: String = server.password
+  def url: String =
+    s"jdbc:postgresql://$hostName:$port/$databaseName?user=$userName&password=$password"
   override def toString: String = url
 }
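Worked example (values assumed, matching the CI defaults above): a database named example on the shared server yields the URL jdbc:postgresql://localhost:54321/example?user=test&password= — the password parameter is empty because the CI server is initialized with trust authentication and an empty password.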


@@ -14,8 +14,8 @@ object PostgresResource {
       implicit executionContext: ExecutionContext
   ): Resource[PostgresDatabase] =
     Resource(Future {
-      startEphemeralPostgres()
+      connectToPostgresqlServer()
       createNewRandomDatabase()
-    })(_ => Future(stopAndCleanUpPostgres()))
+    })(_ => Future(disconnectFromPostgresqlServer()))
   }
 }


@@ -0,0 +1,13 @@
+// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+package com.daml.testing.postgresql
+
+import com.daml.ports.Port
+
+final case class PostgresServer(
+    hostName: String,
+    port: Port,
+    userName: String,
+    password: String,
+)


@@ -5,12 +5,8 @@ package com.daml.testing.postgresql
 import java.nio.file.Path
-import com.daml.ports.Port
-final case class PostgresFixture(
-    port: Port,
-    tempDir: Path,
+case class PostgresServerPaths(
+    root: Path,
     dataDir: Path,
-    confFile: Path,
     logFile: Path,
 )


@@ -204,6 +204,7 @@ in rec {
     xmlstarlet = pkgs.xmlstarlet;
     grep = pkgs.gnugrep;
     bc = pkgs.bc;
+    envsubst = pkgs.envsubst;

     # Cryptography tooling
     gnupg = pkgs.gnupg;