postgresql-testing: Use a new, random database name every time. (#5911)

* postgresql-testing: Store the JDBC URL separately.

* postgresql-testing: Expose the username and password.

* postgresql-testing: Get the caller to create the database.

And make sure it's a random one, not "test".

CHANGELOG_BEGIN
CHANGELOG_END

* postgresql-testing: Only store the JDBC URL for tests.

Less mutable state, innit.

* postgresql-testing: Capture the individual JDBC URL parameters.

* Bazel: Fix PostgreSQL binary paths.

* postgresql-testing: Just recreate the database in PostgresAroundEach.

There's no need to restart the process with a different data directory.
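
For illustration, a minimal sketch (not part of this commit) of how a test suite now consumes these traits: PostgreSQL starts once, and each test case gets its own randomly named database instead of a freshly restarted server process. The suite name and assertion here are made up; the API calls come from the diff below.

import com.daml.testing.postgresql.{PostgresAround, PostgresAroundEach}
import org.scalatest.WordSpec

final class ExampleWithPostgresSpec extends WordSpec with PostgresAroundEach {
  "a component backed by PostgreSQL" should {
    "see a fresh database in every test" in {
      // `postgresDatabase` is created in `beforeEach` and dropped in `afterEach`.
      val jdbcUrl = postgresDatabase.url
      val user = PostgresAround.userName // "test"
      val password = PostgresAround.password // ""
      // ...hand jdbcUrl/user/password to the component under test...
      assert(jdbcUrl.startsWith("jdbc:postgresql://"))
    }
  }
}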
Samir Talwar authored on 2020-05-08 14:36:42 +02:00 (committed by GitHub)
parent 09c80df272
commit 208d4a50da
22 changed files with 165 additions and 91 deletions

View File

@ -820,16 +820,18 @@ dev_env_tool(
],
nix_label = "@postgresql_nix",
nix_paths = [
"bin/initdb",
"bin/createdb",
"bin/dropdb",
"bin/initdb",
"bin/pg_ctl",
"bin/postgres",
],
tools = [
"createdb",
"dropdb",
"initdb",
"pg_ctl",
"postgresql",
"postgres",
],
win_include = [
"mingw64/bin",
@ -844,8 +846,9 @@ dev_env_tool(
"mingw64/share": "share",
},
win_paths = [
"bin/initdb.exe",
"bin/createdb.exe",
"bin/dropdb.exe",
"bin/initdb.exe",
"bin/pg_ctl.exe",
"bin/postgres.exe",
],

View File

@ -4,15 +4,15 @@
package com.daml.extractor.services
import cats.effect.{ContextShift, IO}
import com.daml.lf.data.Ref.Party
import com.daml.extractor.Extractor
import com.daml.extractor.config.{ExtractorConfig, SnapshotEndSetting}
import com.daml.extractor.targets.PostgreSQLTarget
import com.daml.ledger.api.tls.TlsConfiguration
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import com.daml.lf.data.Ref.Party
import com.daml.platform.sandbox.services.SandboxFixture
import com.daml.ports.Port
import com.daml.testing.postgresql.PostgresAround
import com.daml.testing.postgresql.{PostgresAround, PostgresAroundSuite}
import doobie._
import doobie.implicits._
import doobie.util.transactor.Transactor.Aux
@ -22,7 +22,7 @@ import scalaz.OneAnd
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
trait ExtractorFixture extends SandboxFixture with PostgresAround with Types {
trait ExtractorFixture extends SandboxFixture with PostgresAroundSuite with Types {
self: Suite =>
implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
@ -49,9 +49,9 @@ trait ExtractorFixture extends SandboxFixture with PostgresAround with Types {
protected def configureExtractor(ec: ExtractorConfig): ExtractorConfig = ec
protected def target: PostgreSQLTarget = PostgreSQLTarget(
connectUrl = postgresFixture.jdbcUrl,
user = "test",
password = "",
connectUrl = postgresDatabase.url,
user = PostgresAround.userName,
password = PostgresAround.password,
outputFormat = outputFormat,
schemaPerPackage = false,
mergeIdentical = false,

View File

@ -20,7 +20,7 @@ class HttpServiceWithPostgresIntTest
// has to be lazy because postgresFixture is NOT initialized yet
private lazy val jdbcConfig_ = JdbcConfig(
driver = "org.postgresql.Driver",
url = postgresFixture.jdbcUrl,
url = postgresDatabase.url,
user = "test",
password = "",
createSchema = true)

View File

@ -15,11 +15,11 @@ object MainWithEphemeralPostgresql extends PostgresAround {
.getOrElse(sys.exit(1))
startEphemeralPostgres()
val database = createNewRandomDatabase()
sys.addShutdownHook(stopAndCleanUpPostgres())
val config = originalConfig.copy(
participants =
originalConfig.participants.map(_.copy(serverJdbcUrl = postgresFixture.jdbcUrl)),
extra = ExtraConfig(jdbcUrl = Some(postgresFixture.jdbcUrl)),
participants = originalConfig.participants.map(_.copy(serverJdbcUrl = database.url)),
extra = ExtraConfig(jdbcUrl = Some(database.url)),
)
new ProgramResource(new Runner("SQL Ledger", SqlLedgerFactory).owner(config)).run()
}

View File

@ -15,8 +15,8 @@ class PostgresqlSqlLedgerReaderWriterIntegrationSpec
override protected def jdbcUrl(id: String): String = {
if (!databases.contains(id)) {
val jdbcUrl = createNewDatabase(id).jdbcUrl
databases += id -> jdbcUrl
val database = createNewDatabase(id)
databases += id -> database.url
}
databases(id)
}

View File

@ -5,10 +5,10 @@ package com.daml.platform.sandbox.perf
import java.io.File
import com.daml.lf.archive.UniversalArchiveReader
import com.daml.lf.data.Ref
import com.daml.ledger.api.domain.LedgerId
import com.daml.ledger.api.testing.utils.{OwnedResource, Resource}
import com.daml.lf.archive.UniversalArchiveReader
import com.daml.lf.data.Ref
import com.daml.platform.common.LedgerIdMode
import com.daml.platform.sandbox.SandboxServer
import com.daml.platform.sandbox.config.SandboxConfig
@ -47,7 +47,7 @@ object LedgerFactories {
case `mem` =>
ResourceOwner.successful(None)
case `sql` =>
PostgresResource.owner().map(fixture => Some(fixture.jdbcUrl))
PostgresResource.owner().map(database => Some(database.url))
}
server <- SandboxServer.owner(sandboxConfig(jdbcUrl, darFiles))
channel <- GrpcClientResource.owner(server.port)

View File

@ -5,11 +5,11 @@ package com.daml.platform.sandbox
import akka.stream.Materializer
import com.codahale.metrics.MetricRegistry
import com.daml.ledger.participant.state.v1.ParticipantId
import com.daml.api.util.TimeProvider
import com.daml.lf.data.ImmArray
import com.daml.ledger.api.domain.LedgerId
import com.daml.ledger.api.testing.utils.{OwnedResource, Resource}
import com.daml.ledger.participant.state.v1.ParticipantId
import com.daml.lf.data.ImmArray
import com.daml.logging.LoggingContext
import com.daml.metrics.Metrics
import com.daml.platform.common.LedgerIdMode
@ -60,10 +60,10 @@ object LedgerResource {
): Resource[Ledger] =
new OwnedResource(
for {
postgres <- PostgresResource.owner()
database <- PostgresResource.owner()
ledger <- SqlLedger.owner(
serverRole = ServerRole.Testing(testClass),
jdbcUrl = postgres.jdbcUrl,
jdbcUrl = database.url,
ledgerId = LedgerIdMode.Static(ledgerId),
participantId = participantId,
timeProvider = timeProvider,

View File

@ -15,7 +15,7 @@ object SandboxBackend {
trait Postgresql { this: AbstractSandboxFixture =>
override protected final def database: Option[ResourceOwner[DbInfo]] =
Some(PostgresResource.owner().map(resource => DbInfo(resource.jdbcUrl, DbType.Postgres)))
Some(PostgresResource.owner().map(database => DbInfo(database.url, DbType.Postgres)))
}
trait H2Database { this: AbstractSandboxFixture =>

View File

@ -9,7 +9,8 @@ import com.daml.testing.postgresql.PostgresAround
object MainWithEphemeralPostgresql extends PostgresAround {
def main(args: Array[String]): Unit = {
startEphemeralPostgres()
val database = createNewRandomDatabase()
sys.addShutdownHook(stopAndCleanUpPostgres())
SandboxMain.main(args ++ Array("--sql-backend-jdbcurl", postgresFixture.jdbcUrl))
SandboxMain.main(args ++ Array("--sql-backend-jdbcurl", database.url))
}
}

View File

@ -9,7 +9,8 @@ import com.daml.testing.postgresql.PostgresAround
object MainWithEphemeralPostgresql extends PostgresAround {
def main(args: Array[String]): Unit = {
startEphemeralPostgres()
val database = createNewRandomDatabase()
sys.addShutdownHook(stopAndCleanUpPostgres())
Main.main(args ++ Array("--sql-backend-jdbcurl", postgresFixture.jdbcUrl))
Main.main(args ++ Array("--sql-backend-jdbcurl", database.url))
}
}

View File

@ -12,6 +12,6 @@ private[dao] trait JdbcLedgerDaoBackendPostgresql
with PostgresAroundAll { this: Suite =>
override protected val dbType: DbType = DbType.Postgres
override protected def jdbcUrl: String = postgresFixture.jdbcUrl
override protected def jdbcUrl: String = postgresDatabase.url
}

View File

@ -26,7 +26,7 @@ class PostgresIT extends AsyncWordSpec with Matchers with PostgresAroundAll with
connectionProviderResource = HikariJdbcConnectionProvider
.owner(
ServerRole.Testing(getClass),
postgresFixture.jdbcUrl,
postgresDatabase.url,
maxConnections = 4,
new MetricRegistry,
)
@ -55,7 +55,7 @@ class PostgresIT extends AsyncWordSpec with Matchers with PostgresAroundAll with
"Flyway" should {
"execute initialisation script" in {
newLoggingContext { implicit logCtx =>
new FlywayMigrations(postgresFixture.jdbcUrl).migrate()(DirectExecutionContext)
new FlywayMigrations(postgresDatabase.url).migrate()(DirectExecutionContext)
}.map { _ =>
connectionProvider.runSQL { conn =>
def checkTableExists(table: String) = {

View File

@ -5,9 +5,9 @@ package com.daml.platform.sandbox.services.identity
import java.util.UUID
import com.daml.lf.data.Ref
import com.daml.ledger.api.domain.LedgerId
import com.daml.ledger.api.testing.utils.SuiteResourceManagementAroundEach
import com.daml.lf.data.Ref
import com.daml.platform.common.LedgerIdMode
import com.daml.platform.sandbox.SandboxBackend
import com.daml.platform.sandbox.config.SandboxConfig
@ -97,7 +97,7 @@ final class LedgerIdentityServicePostgresDynamicSharedPostgresIT
override protected def config: SandboxConfig =
super.config
.copy(
jdbcUrl = Some(postgresFixture.jdbcUrl),
jdbcUrl = Some(postgresDatabase.url),
ledgerIdMode = Option(firstRunLedgerId).fold[LedgerIdMode](LedgerIdMode.Dynamic)(id =>
LedgerIdMode.Static(LedgerId(Ref.LedgerString.assertFromString(id))))
)

View File

@ -6,15 +6,15 @@ package com.daml.platform.sandbox.stores.ledger.sql
import java.nio.file.Paths
import java.time.Instant
import com.daml.ledger.participant.state.v1.ParticipantId
import com.daml.api.util.TimeProvider
import com.daml.bazeltools.BazelRunfiles.rlocation
import com.daml.lf.archive.DarReader
import com.daml.lf.data.{ImmArray, Ref}
import com.daml.daml_lf_dev.DamlLf
import com.daml.ledger.api.domain.LedgerId
import com.daml.ledger.api.health.{Healthy, Unhealthy}
import com.daml.ledger.api.testing.utils.AkkaBeforeAndAfterAll
import com.daml.ledger.participant.state.v1.ParticipantId
import com.daml.lf.archive.DarReader
import com.daml.lf.data.{ImmArray, Ref}
import com.daml.logging.LoggingContext.newLoggingContext
import com.daml.metrics.Metrics
import com.daml.platform.common.LedgerIdMode
@ -202,7 +202,7 @@ class SqlLedgerSpec
SqlLedger
.owner(
serverRole = ServerRole.Testing(getClass),
jdbcUrl = postgresFixture.jdbcUrl,
jdbcUrl = postgresDatabase.url,
ledgerId = ledgerId.fold[LedgerIdMode](LedgerIdMode.Dynamic)(LedgerIdMode.Static),
participantId = participantId,
timeProvider = TimeProvider.UTC,

View File

@ -6,7 +6,8 @@ package com.daml.testing.postgresql
import java.io.StringWriter
import java.net.InetAddress
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}
import java.nio.file.{Files, Path}
import java.util.UUID
import java.util.concurrent.atomic.AtomicBoolean
import com.daml.testing.postgresql.PostgresAround._
@ -18,7 +19,7 @@ import scala.util.control.NonFatal
trait PostgresAround {
@volatile
protected var postgresFixture: PostgresFixture = _
private var fixture: PostgresFixture = _
private val started: AtomicBoolean = new AtomicBoolean(false)
@ -26,26 +27,24 @@ trait PostgresAround {
logger.info("Starting an ephemeral PostgreSQL instance...")
val tempDir = Files.createTempDirectory("postgres_test")
val dataDir = tempDir.resolve("data")
val confFile = Paths.get(dataDir.toString, "postgresql.conf")
val confFile = dataDir.resolve("postgresql.conf")
val logFile = Files.createFile(tempDir.resolve("postgresql.log"))
val lockedPort = FreePort.find()
val port = lockedPort.port
val jdbcUrl = s"jdbc:postgresql://$hostName:$port/$databaseName?user=$userName"
val logFile = Files.createFile(tempDir.resolve("postgresql.log"))
postgresFixture = PostgresFixture(jdbcUrl, port, tempDir, dataDir, confFile, logFile)
fixture = PostgresFixture(port, tempDir, dataDir, confFile, logFile)
try {
initializeDatabase()
createConfigFile()
startPostgres()
lockedPort.unlock()
createTestDatabase(databaseName)
logger.info(s"PostgreSQL has started on port $port.")
} catch {
case NonFatal(e) =>
lockedPort.unlock()
stopPostgres()
deleteRecursively(tempDir)
postgresFixture = null
fixture = null
throw e
}
}
@ -53,8 +52,9 @@ trait PostgresAround {
protected def stopAndCleanUpPostgres(): Unit = {
logger.info("Stopping and cleaning up PostgreSQL...")
stopPostgres()
deleteRecursively(postgresFixture.tempDir)
deleteRecursively(fixture.tempDir)
logger.info("PostgreSQL has stopped, and the data directory has been deleted.")
fixture = null
}
protected def startPostgres(): Unit = {
@ -69,9 +69,9 @@ trait PostgresAround {
Tool.pg_ctl,
"-w",
"-D",
postgresFixture.dataDir.toString,
fixture.dataDir.toString,
"-l",
postgresFixture.logFile.toString,
fixture.logFile.toString,
"start",
)
} catch {
@ -90,7 +90,7 @@ trait PostgresAround {
Tool.pg_ctl,
"-w",
"-D",
postgresFixture.dataDir.toString,
fixture.dataDir.toString,
"-m",
"immediate",
"stop",
@ -99,10 +99,13 @@ trait PostgresAround {
}
}
protected def createNewDatabase(name: String): PostgresFixture = {
createTestDatabase(name)
val jdbcUrl = s"jdbc:postgresql://$hostName:${postgresFixture.port}/$name?user=$userName"
postgresFixture.copy(jdbcUrl = jdbcUrl)
protected def createNewRandomDatabase(): PostgresDatabase =
createNewDatabase(UUID.randomUUID().toString)
protected def createNewDatabase(name: String): PostgresDatabase = {
val database = PostgresDatabase(hostName, fixture.port, userName, name)
createDatabase(database)
database
}
private def initializeDatabase(): Unit = run(
@ -114,7 +117,7 @@ trait PostgresAround {
"UNICODE",
"-A",
"trust",
postgresFixture.dataDir.toString.replaceAllLiterally("\\", "/"),
fixture.dataDir.toString.replaceAllLiterally("\\", "/"),
)
private def createConfigFile(): Unit = {
@ -133,22 +136,34 @@ trait PostgresAround {
|log_min_duration_statement = 0
|log_connections = on
|listen_addresses = '$hostName'
|port = ${postgresFixture.port}
""".stripMargin
Files.write(postgresFixture.confFile, configText.getBytes(StandardCharsets.UTF_8))
|port = ${fixture.port}
""".stripMargin
Files.write(fixture.confFile, configText.getBytes(StandardCharsets.UTF_8))
()
}
private def createTestDatabase(name: String): Unit = run(
private def createDatabase(database: PostgresDatabase): Unit = run(
"create the database",
Tool.createdb,
"-h",
hostName,
"-U",
userName,
"-p",
postgresFixture.port.toString,
name,
"--host",
database.hostName,
"--port",
database.port.toString,
"--username",
database.userName,
database.databaseName,
)
protected def dropDatabase(database: PostgresDatabase): Unit = run(
"drop a database",
Tool.dropdb,
"--host",
database.hostName,
"--port",
database.port.toString,
"--username",
database.userName,
database.databaseName,
)
private def run(description: String, tool: Tool, args: String*): Unit = {
@ -161,7 +176,7 @@ trait PostgresAround {
IOUtils.copy(process.getInputStream, stdout, StandardCharsets.UTF_8)
val stderr = new StringWriter
IOUtils.copy(process.getErrorStream, stderr, StandardCharsets.UTF_8)
val logs = Files.readAllLines(postgresFixture.logFile).asScala
val logs = Files.readAllLines(fixture.logFile).asScala
throw new ProcessFailedException(
description = description,
command = command,
@ -174,7 +189,7 @@ trait PostgresAround {
case e: ProcessFailedException =>
throw e
case NonFatal(e) =>
val logs = Files.readAllLines(postgresFixture.logFile).asScala
val logs = Files.readAllLines(fixture.logFile).asScala
throw new ProcessFailedException(
description = description,
command = command,
@ -192,8 +207,9 @@ object PostgresAround {
private val logger = LoggerFactory.getLogger(getClass)
private val hostName = InetAddress.getLoopbackAddress.getHostName
private val userName = "test"
private val databaseName = "test"
val userName = "test"
val password = ""
private class ProcessFailedException(
description: String,

View File

@ -3,17 +3,16 @@
package com.daml.testing.postgresql
import org.scalatest.BeforeAndAfterAll
import org.scalatest.{BeforeAndAfterAll, Suite}
trait PostgresAroundAll extends PostgresAround with BeforeAndAfterAll {
self: org.scalatest.Suite =>
trait PostgresAroundAll extends PostgresAroundSuite with BeforeAndAfterAll {
self: Suite =>
override protected def beforeAll(): Unit = {
// we start pg before running the rest because _generally_ the database
// needs to be up before everything else. this is relevant for
// ScenarioLoadingITPostgres at least. we could much with the mixin
// order but this was easier...
// We start PostgreSQL before calling `super` because _generally_ the database needs to be up
// before everything else.
startEphemeralPostgres()
createNewDatabase()
super.beforeAll()
}

View File

@ -3,22 +3,34 @@
package com.daml.testing.postgresql
import org.scalatest.BeforeAndAfterEach
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}
trait PostgresAroundEach extends PostgresAround with BeforeAndAfterEach {
self: org.scalatest.Suite =>
trait PostgresAroundEach
extends PostgresAroundSuite
with BeforeAndAfterAll
with BeforeAndAfterEach {
self: Suite =>
override protected def beforeAll(): Unit = {
// We start PostgreSQL before calling `super` because _generally_ the database needs to be up
// before everything else.
startEphemeralPostgres()
super.beforeAll()
}
override protected def afterAll(): Unit = {
super.afterAll()
stopAndCleanUpPostgres()
}
override protected def beforeEach(): Unit = {
// we start pg before running the rest because _generally_ the database
// needs to be up before everything else. this is relevant for
// ScenarioLoadingITPostgres at least. we could much with the mixin
// order but this was easier...
startEphemeralPostgres()
// We create the database before calling `super` for the same reasons as above.
createNewDatabase()
super.beforeEach()
}
override protected def afterEach(): Unit = {
super.afterEach()
stopAndCleanUpPostgres()
dropDatabase()
}
}

View File

@ -0,0 +1,25 @@
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.testing.postgresql
import org.scalatest.Suite
trait PostgresAroundSuite extends PostgresAround {
self: Suite =>
@volatile
private var database: Option[PostgresDatabase] = None
protected def postgresDatabase: PostgresDatabase = database.get
protected def createNewDatabase(): PostgresDatabase = {
database = Some(createNewRandomDatabase())
postgresDatabase
}
protected def dropDatabase(): Unit = {
dropDatabase(postgresDatabase)
database = None
}
}

View File

@ -0,0 +1,17 @@
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.testing.postgresql
import com.daml.ports.Port
final case class PostgresDatabase(
hostName: String,
port: Port,
userName: String,
databaseName: String,
) {
def url: String = s"jdbc:postgresql://$hostName:$port/$databaseName?user=$userName"
override def toString: String = url
}
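
For illustration, a hedged example of the parameters this case class captures and the URL it renders. The host, port, and database name are made up, and `Port(5432)` assumes the usual integer constructor on com.daml.ports.Port:

import com.daml.ports.Port

val database = PostgresDatabase(
  hostName = "127.0.0.1",
  port = Port(5432), // in the fixture this is the dynamically allocated port
  userName = "test",
  databaseName = "1f2e3d4c-0a9b-4c8d-b7e6-5f4a3b2c1d0e", // normally a random UUID
)
// database.url == "jdbc:postgresql://127.0.0.1:5432/1f2e3d4c-0a9b-4c8d-b7e6-5f4a3b2c1d0e?user=test"
// toString delegates to url, so logging the database prints the same string.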

View File

@ -7,8 +7,7 @@ import java.nio.file.Path
import com.daml.ports.Port
case class PostgresFixture(
jdbcUrl: String,
final case class PostgresFixture(
port: Port,
tempDir: Path,
dataDir: Path,

View File

@ -8,14 +8,14 @@ import com.daml.resources.{Resource, ResourceOwner}
import scala.concurrent.{ExecutionContext, Future}
object PostgresResource {
def owner(): ResourceOwner[PostgresFixture] =
new ResourceOwner[PostgresFixture] with PostgresAround {
def owner(): ResourceOwner[PostgresDatabase] =
new ResourceOwner[PostgresDatabase] with PostgresAround {
override def acquire()(
implicit executionContext: ExecutionContext
): Resource[PostgresFixture] =
): Resource[PostgresDatabase] =
Resource(Future {
startEphemeralPostgres()
postgresFixture
createNewRandomDatabase()
})(_ => Future(stopAndCleanUpPostgres()))
}
}
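
As a usage note, callers that previously read `fixture.jdbcUrl` now receive a PostgresDatabase and read `database.url`. A minimal sketch of composing the owner (the surrounding method is hypothetical):

import com.daml.resources.ResourceOwner
import com.daml.testing.postgresql.PostgresResource

def databaseUrlOwner(): ResourceOwner[String] =
  for {
    database <- PostgresResource.owner()
  } yield database.url // e.g. passed on as a participant's serverJdbcUrl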

View File

@ -23,7 +23,8 @@ private[postgresql] object Tool {
else
""
val createdb = Tool("createdb")
val initdb = Tool("initdb")
val pg_ctl = Tool("pg_ctl")
val createdb: Tool = Tool("createdb")
val dropdb: Tool = Tool("dropdb")
val initdb: Tool = Tool("initdb")
val pg_ctl: Tool = Tool("pg_ctl")
}