diff --git a/bazel-java-deps.bzl b/bazel-java-deps.bzl
index 6294cae222dd..c1d02097f437 100644
--- a/bazel-java-deps.bzl
+++ b/bazel-java-deps.bzl
@@ -116,7 +116,7 @@ def install_java_deps():
             "org.mongodb:mongodb-driver-async:3.6.4",
             "org.mongodb:mongodb-driver-core:3.6.4",
             "org.pcollections:pcollections:2.1.3",
-            "org.postgresql:postgresql:42.2.6",
+            "org.postgresql:postgresql:42.2.9",
             "org.reactivestreams:reactive-streams:1.0.2",
             "org.reactivestreams:reactive-streams-tck:1.0.2",
             "org.sangria-graphql:sangria_2.12:1.4.2",
@@ -142,7 +142,7 @@ def install_java_deps():
             "org.tpolecat:doobie-postgres_2.12:0.6.0",
             "org.typelevel:paiges-core_2.12:0.2.1",
             "org.wartremover:wartremover_2.12:2.2.0",
-            "org.xerial:sqlite-jdbc:3.25.2",
+            "org.xerial:sqlite-jdbc:3.30.1",
             "uk.co.datumedge:hamcrest-json:0.2",
         ],
         fetch_sources = True,
diff --git a/ledger/ledger-on-sql/BUILD.bazel b/ledger/ledger-on-sql/BUILD.bazel
index 7c9814a12e2c..e11d00dd65f6 100644
--- a/ledger/ledger-on-sql/BUILD.bazel
+++ b/ledger/ledger-on-sql/BUILD.bazel
@@ -5,13 +5,16 @@ load(
     "//bazel_tools:scala.bzl",
     "da_scala_binary",
     "da_scala_library",
-    "da_scala_test",
+    "da_scala_test_suite",
 )
 load("//ledger/ledger-api-test-tool:conformance.bzl", "conformance_test")
 
 supported_databases = [
     {
         "name": "h2-memory",
+        "runtime_deps": [
+            "@maven//:com_h2database_h2",
+        ],
         "conformance_test_tags": [
             "manual",
         ],
@@ -22,12 +25,12 @@ supported_databases = [
             "--concurrent-test-runs=2",
             "--timeout-scale-factor=4",
         ],
-        "runtime_deps": [
-            "@maven//:com_h2database_h2",
-        ],
     },
     {
         "name": "h2-file",
+        "runtime_deps": [
+            "@maven//:com_h2database_h2",
+        ],
         "conformance_test_tags": [
             "manual",
         ],
@@ -39,28 +42,32 @@ supported_databases = [
             "--concurrent-test-runs=2",
             "--timeout-scale-factor=4",
         ],
+    },
+    {
+        "name": "postgresql",
         "runtime_deps": [
-            "@maven//:com_h2database_h2",
+            "@maven//:org_postgresql_postgresql",
         ],
+        "conformance_test_server_main": "com.daml.ledger.on.sql.MainWithEphemeralPostgresql",
     },
     {
         "name": "sqlite-memory",
-        "conformance_test_server_args": [
-            "--jdbc-url=jdbc:sqlite::memory:",
-        ],
         "runtime_deps": [
             "@maven//:org_xerial_sqlite_jdbc",
         ],
+        "conformance_test_server_args": [
+            "--jdbc-url=jdbc:sqlite::memory:",
+        ],
     },
     {
         "name": "sqlite-file",
+        "runtime_deps": [
+            "@maven//:org_xerial_sqlite_jdbc",
+        ],
         "conformance_test_server_main": "com.daml.ledger.on.sql.MainWithEphemeralDirectory",
         "conformance_test_server_args": [
             "--jdbc-url=jdbc:sqlite:%DIR/test.sqlite",
         ],
-        "runtime_deps": [
-            "@maven//:org_xerial_sqlite_jdbc",
-        ],
     },
 ]
 
@@ -97,7 +104,27 @@ da_scala_library(
     ],
 )
 
-da_scala_test(
+da_scala_library(
+    name = "ledger-on-sql-test-lib",
+    srcs = glob(["src/test/lib/scala/**/*.scala"]),
+    visibility = [
+        "//visibility:public",
+    ],
+    deps = [
+        ":ledger-on-sql",
+        "//daml-lf/data",
+        "//ledger/ledger-api-health",
+        "//ledger/participant-state",
+        "//ledger/participant-state/kvutils",
+        "//ledger/participant-state/kvutils/app",
+        "//libs-scala/postgresql-testing",
+        "//libs-scala/resources",
+        "@maven//:com_github_scopt_scopt_2_12",
+        "@maven//:com_typesafe_akka_akka_stream_2_12",
+    ],
+)
+
+da_scala_test_suite(
     name = "ledger-on-sql-tests",
     size = "small",
     srcs = glob(["src/test/suite/**/*.scala"]),
@@ -107,6 +134,7 @@ da_scala_test(
     resources = glob(["src/test/resources/*"]),
     runtime_deps = [
         "@maven//:com_h2database_h2",
+        "@maven//:org_postgresql_postgresql",
         "@maven//:org_xerial_sqlite_jdbc",
     ],
     deps = [
@@ -120,6 +148,7 @@ da_scala_test(
         "//ledger/participant-state/kvutils",
         "//ledger/participant-state/kvutils:kvutils-tests-lib",
         "//libs-scala/contextualized-logging",
+        "//libs-scala/postgresql-testing",
         "//libs-scala/resources",
         "@maven//:com_typesafe_akka_akka_actor_2_12",
         "@maven//:com_typesafe_akka_akka_stream_2_12",
@@ -132,20 +161,12 @@ da_scala_test(
     (
         da_scala_binary(
             name = "conformance-test-{}-bin".format(db["name"]),
-            srcs = glob(["src/test/lib/**/*.scala"]),
             main_class = db.get("conformance_test_server_main", "com.daml.ledger.on.sql.Main"),
             visibility = ["//visibility:public"],
             runtime_deps = db.get("runtime_deps", []),
             deps = [
                 ":ledger-on-sql",
-                "//daml-lf/data",
-                "//ledger/ledger-api-health",
-                "//ledger/participant-state",
-                "//ledger/participant-state/kvutils",
-                "//ledger/participant-state/kvutils/app",
-                "//libs-scala/resources",
-                "@maven//:com_github_scopt_scopt_2_12",
-                "@maven//:com_typesafe_akka_akka_stream_2_12",
+                ":ledger-on-sql-test-lib",
             ],
         ),
         conformance_test(
@@ -157,7 +178,6 @@ da_scala_test(
             test_tool_args = db.get("conformance_test_tool_args", []) + [
                 "--all-tests",
                 "--exclude=ConfigManagementServiceIT",
-                "--exclude=LotsOfPartiesIT",
                 "--exclude=TimeIT",
                 "--exclude=TransactionScaleIT",
             ],
diff --git a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/Database.scala b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/Database.scala
index 352adb5c4e5d..79626657595c 100644
--- a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/Database.scala
+++ b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/Database.scala
@@ -4,19 +4,17 @@
 package com.daml.ledger.on.sql
 
 import com.daml.ledger.on.sql.queries.Queries.InvalidDatabaseException
-import com.daml.ledger.on.sql.queries.{H2Queries, Queries, SqliteQueries}
+import com.daml.ledger.on.sql.queries.{H2Queries, PostgresqlQueries, Queries, SqliteQueries}
 import com.digitalasset.logging.{ContextualizedLogger, LoggingContext}
 import com.digitalasset.resources.ResourceOwner
 import com.zaxxer.hikari.HikariDataSource
 import javax.sql.DataSource
 
-sealed trait Database {
-  val queries: Queries
-
-  val readerConnectionPool: DataSource
-
-  val writerConnectionPool: DataSource
-}
+case class Database(
+    queries: Queries,
+    readerConnectionPool: DataSource,
+    writerConnectionPool: DataSource,
+)
 
 object Database {
   private val logger = ContextualizedLogger.get(classOf[Database])
@@ -31,54 +29,45 @@ object Database {
 
   def owner(jdbcUrl: String)(implicit logCtx: LoggingContext): ResourceOwner[Database] =
     (jdbcUrl match {
-      case url if url.startsWith("jdbc:h2:") => MultipleReaderSingleWriterDatabase.owner(jdbcUrl)
-      case url if url.startsWith("jdbc:sqlite:") => SingleConnectionDatabase.owner(jdbcUrl)
+      case url if url.startsWith("jdbc:h2:") =>
+        MultipleReaderSingleWriterDatabase.owner(jdbcUrl, new H2Queries)
+      case url if url.startsWith("jdbc:postgresql:") =>
+        MultipleReaderSingleWriterDatabase.owner(jdbcUrl, new PostgresqlQueries)
+      case url if url.startsWith("jdbc:sqlite:") =>
+        SingleConnectionDatabase.owner(jdbcUrl, new SqliteQueries)
       case _ => throw new InvalidDatabaseException(jdbcUrl)
     }).map { database =>
       logger.info(s"Connected to the ledger over JDBC: $jdbcUrl")
       database
     }
 
-  final class MultipleReaderSingleWriterDatabase(
-      override val readerConnectionPool: DataSource,
-      override val writerConnectionPool: DataSource,
-  ) extends Database {
-    override val queries: Queries = new H2Queries
-  }
-
   object MultipleReaderSingleWriterDatabase {
-    def owner(jdbcUrl: String): ResourceOwner[MultipleReaderSingleWriterDatabase] =
+    def owner(jdbcUrl: String, queries: Queries): ResourceOwner[Database] =
       for {
         readerConnectionPool <- ResourceOwner.forCloseable(() =>
-          newHikariDataSource(jdbcUrl, maximumPoolSize = None))
+          newHikariDataSource(jdbcUrl, readOnly = true))
         writerConnectionPool <- ResourceOwner.forCloseable(() =>
           newHikariDataSource(jdbcUrl, maximumPoolSize = Some(MaximumWriterConnectionPoolSize)))
-      } yield new MultipleReaderSingleWriterDatabase(readerConnectionPool, writerConnectionPool)
-  }
-
-  final class SingleConnectionDatabase(connectionPool: DataSource) extends Database {
-    override val queries: Queries = new SqliteQueries
-
-    override val readerConnectionPool: DataSource = connectionPool
-
-    override val writerConnectionPool: DataSource = connectionPool
+      } yield new Database(queries, readerConnectionPool, writerConnectionPool)
   }
 
   object SingleConnectionDatabase {
-    def owner(jdbcUrl: String): ResourceOwner[SingleConnectionDatabase] =
+    def owner(jdbcUrl: String, queries: Queries): ResourceOwner[Database] =
       for {
         connectionPool <- ResourceOwner.forCloseable(() =>
           newHikariDataSource(jdbcUrl, maximumPoolSize = Some(MaximumWriterConnectionPoolSize)))
-      } yield new SingleConnectionDatabase(connectionPool)
+      } yield new Database(queries, connectionPool, connectionPool)
   }
 
   private def newHikariDataSource(
       jdbcUrl: String,
-      maximumPoolSize: Option[Int],
+      maximumPoolSize: Option[Int] = None,
+      readOnly: Boolean = false,
   ): HikariDataSource = {
     val pool = new HikariDataSource()
     pool.setAutoCommit(false)
     pool.setJdbcUrl(jdbcUrl)
+    pool.setReadOnly(readOnly)
     maximumPoolSize.foreach { maximumPoolSize =>
       pool.setMaximumPoolSize(maximumPoolSize)
     }
diff --git a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/SqlLedgerReaderWriter.scala b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/SqlLedgerReaderWriter.scala
index c3c2532f293e..cad5d97dc12d 100644
--- a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/SqlLedgerReaderWriter.scala
+++ b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/SqlLedgerReaderWriter.scala
@@ -102,7 +102,8 @@ class SqlLedgerReaderWriter(
         )
         verifyStateUpdatesAgainstPreDeclaredOutputs(stateUpdates, entryId, submission)
         queries.updateState(stateUpdates)
-        appendLog(entryId, Envelope.enclose(logEntry))
+        val latestSequenceNo = queries.insertIntoLog(entryId, Envelope.enclose(logEntry))
+        latestSequenceNo + 1
       }
       dispatcher.signalNewHead(newHead)
       SubmissionResult.Acknowledged
@@ -130,14 +131,6 @@ class SqlLedgerReaderWriter(
       .setEntryId(ByteString.copyFromUtf8(UUID.randomUUID().toString))
       .build()
 
-  private def appendLog(
-      entry: DamlLogEntryId,
-      envelope: ByteString,
-  )(implicit connection: Connection): Index = {
-    queries.insertIntoLog(entry, envelope)
-    queries.lastLogInsertId() + 1
-  }
-
   private def readState(
       stateInputKeys: Set[DamlStateKey],
   )(implicit connection: Connection): Map[DamlStateKey, Option[DamlStateValue]] = {
diff --git a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/CommonQueries.scala b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/CommonQueries.scala
index 485f785009a8..16511fd6123c 100644
--- a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/CommonQueries.scala
+++ b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/CommonQueries.scala
@@ -20,12 +20,6 @@ import com.google.protobuf.ByteString
 import scala.collection.immutable
 
 trait CommonQueries extends Queries {
-  override def createStateTable()(implicit connection: Connection): Unit = {
-    SQL"CREATE TABLE IF NOT EXISTS state (key VARBINARY(16384) PRIMARY KEY NOT NULL, value BLOB NOT NULL)"
-      .execute()
-    ()
-  }
-
   override def selectFromLog(
       start: Index,
       end: Index,
@@ -45,15 +39,6 @@ trait CommonQueries extends Queries {
       }.*
   )
 
-  override def insertIntoLog(
-      entry: DamlLogEntryId,
-      envelope: ByteString,
-  )(implicit connection: Connection): Unit = {
-    SQL"INSERT INTO log (entry_id, envelope) VALUES (${entry.getEntryId.toByteArray}, ${envelope.toByteArray})"
-      .executeInsert()
-    ()
-  }
-
   override def selectStateByKeys(
       keys: Iterable[DamlStateKey],
   )(implicit connection: Connection): immutable.Seq[(DamlStateKey, Option[DamlStateValue])] =
diff --git a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/H2Queries.scala b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/H2Queries.scala
index 4c0b180186f7..aee115c65d1b 100644
--- a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/H2Queries.scala
+++ b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/H2Queries.scala
@@ -8,6 +8,8 @@ import java.sql.Connection
 import anorm.SqlParser._
 import anorm._
 import com.daml.ledger.on.sql.queries.Queries.Index
+import com.daml.ledger.participant.state.kvutils.DamlKvutils.DamlLogEntryId
+import com.google.protobuf.ByteString
 
 class H2Queries extends Queries with CommonQueries {
   override def createLogTable()(implicit connection: Connection): Unit = {
@@ -16,9 +18,21 @@ class H2Queries extends Queries with CommonQueries {
     ()
   }
 
-  override def lastLogInsertId()(implicit connection: Connection): Index =
+  override def createStateTable()(implicit connection: Connection): Unit = {
+    SQL"CREATE TABLE IF NOT EXISTS state (key VARBINARY(16384) PRIMARY KEY NOT NULL, value BLOB NOT NULL)"
+      .execute()
+    ()
+  }
+
+  override def insertIntoLog(
+      entry: DamlLogEntryId,
+      envelope: ByteString,
+  )(implicit connection: Connection): Index = {
+    SQL"INSERT INTO log (entry_id, envelope) VALUES (${entry.getEntryId.newInput()}, ${envelope.newInput()})"
+      .executeInsert()
     SQL"CALL IDENTITY()"
       .as(long("IDENTITY()").single)
+  }
 
   override protected val updateStateQuery: String =
     "MERGE INTO state VALUES ({key}, {value})"
diff --git a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/PostgresqlQueries.scala b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/PostgresqlQueries.scala
new file mode 100644
index 000000000000..2a6dd767927c
--- /dev/null
+++ b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/PostgresqlQueries.scala
@@ -0,0 +1,37 @@
+// Copyright (c) 2020 The DAML Authors. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+package com.daml.ledger.on.sql.queries
+
+import java.sql.Connection
+
+import anorm.SqlParser._
+import anorm._
+import com.daml.ledger.on.sql.queries.Queries.Index
+import com.daml.ledger.participant.state.kvutils.DamlKvutils.DamlLogEntryId
+import com.google.protobuf.ByteString
+
+class PostgresqlQueries extends Queries with CommonQueries {
+  override def createLogTable()(implicit connection: Connection): Unit = {
+    SQL"CREATE TABLE IF NOT EXISTS log (sequence_no SERIAL PRIMARY KEY, entry_id BYTEA NOT NULL, envelope BYTEA NOT NULL)"
+      .execute()
+    ()
+  }
+
+  override def createStateTable()(implicit connection: Connection): Unit = {
+    SQL"CREATE TABLE IF NOT EXISTS state (key BYTEA PRIMARY KEY NOT NULL, value BYTEA NOT NULL)"
+      .execute()
+    ()
+  }
+
+  override def insertIntoLog(
+      entry: DamlLogEntryId,
+      envelope: ByteString,
+  )(implicit connection: Connection): Index = {
+    SQL"INSERT INTO log (entry_id, envelope) VALUES (${entry.getEntryId.newInput()}, ${envelope.newInput()}) RETURNING sequence_no"
+      .as(long("sequence_no").single)
+  }
+
+  override protected val updateStateQuery: String =
+    "INSERT INTO state VALUES ({key}, {value}) ON CONFLICT(key) DO UPDATE SET value = {value}"
+}
diff --git a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/Queries.scala b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/Queries.scala
index 2a93824ec18c..113bbb9efe5e 100644
--- a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/Queries.scala
+++ b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/Queries.scala
@@ -26,9 +26,7 @@ trait Queries {
   def insertIntoLog(
       entry: DamlKvutils.DamlLogEntryId,
       envelope: ByteString,
-  )(implicit connection: Connection): Unit
-
-  def lastLogInsertId()(implicit connection: Connection): Index
+  )(implicit connection: Connection): Index
 
   def selectStateByKeys(
       keys: Iterable[DamlKvutils.DamlStateKey],
diff --git a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/SqliteQueries.scala b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/SqliteQueries.scala
index 14ab9020199a..473379a93caf 100644
--- a/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/SqliteQueries.scala
+++ b/ledger/ledger-on-sql/src/main/scala/com/daml/ledger/on/sql/queries/SqliteQueries.scala
@@ -8,6 +8,8 @@ import java.sql.Connection
 import anorm.SqlParser._
 import anorm._
 import com.daml.ledger.on.sql.queries.Queries.Index
+import com.daml.ledger.participant.state.kvutils.DamlKvutils.DamlLogEntryId
+import com.google.protobuf.ByteString
 
 class SqliteQueries extends Queries with CommonQueries {
   override def createLogTable()(implicit connection: Connection): Unit = {
@@ -16,7 +18,18 @@ class SqliteQueries extends Queries with CommonQueries {
     ()
   }
 
-  override def lastLogInsertId()(implicit connection: Connection): Index = {
+  override def createStateTable()(implicit connection: Connection): Unit = {
+    SQL"CREATE TABLE IF NOT EXISTS state (key VARBINARY(16384) PRIMARY KEY NOT NULL, value BLOB NOT NULL)"
+      .execute()
+    ()
+  }
+
+  override def insertIntoLog(
+      entry: DamlLogEntryId,
+      envelope: ByteString,
+  )(implicit connection: Connection): Index = {
+    SQL"INSERT INTO log (entry_id, envelope) VALUES (${entry.getEntryId.toByteArray}, ${envelope.toByteArray})"
+      .executeInsert()
     SQL"SELECT LAST_INSERT_ROWID()"
       .as(long("LAST_INSERT_ROWID()").single)
   }
diff --git a/ledger/ledger-on-sql/src/test/lib/scala/com/daml/ledger/on/sql/MainWithEphemeralPostgresql.scala b/ledger/ledger-on-sql/src/test/lib/scala/com/daml/ledger/on/sql/MainWithEphemeralPostgresql.scala
new file mode 100644
index 000000000000..3234d7b6ef62
--- /dev/null
+++ b/ledger/ledger-on-sql/src/test/lib/scala/com/daml/ledger/on/sql/MainWithEphemeralPostgresql.scala
@@ -0,0 +1,37 @@
+// Copyright (c) 2020 The DAML Authors. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+package com.daml.ledger.on.sql
+
+import akka.stream.Materializer
+import com.daml.ledger.on.sql.Main.{ExtraConfig, SqlLedgerFactory}
+import com.daml.ledger.participant.state.kvutils.app.{Config, LedgerFactory, Runner}
+import com.daml.ledger.participant.state.v1.{LedgerId, ParticipantId}
+import com.digitalasset.resources.ResourceOwner
+import com.digitalasset.testing.postgresql.PostgresAround
+import scopt.OptionParser
+
+object MainWithEphemeralPostgresql extends App with PostgresAround {
+  startEphemeralPostgres()
+  sys.addShutdownHook(stopAndCleanUpPostgres())
+
+  Runner("SQL Ledger", PostgresqlLedgerFactory).run(args)
+
+  object PostgresqlLedgerFactory extends LedgerFactory[SqlLedgerReaderWriter, Unit] {
+    override val defaultExtraConfig: Unit = ()
+
+    override def extraConfigParser(parser: OptionParser[Config[Unit]]): Unit = ()
+
+    override def owner(
+        ledgerId: LedgerId,
+        participantId: ParticipantId,
+        config: Unit,
+    )(implicit materializer: Materializer): ResourceOwner[SqlLedgerReaderWriter] = {
+      SqlLedgerFactory.owner(
+        ledgerId,
+        participantId,
+        ExtraConfig(jdbcUrl = Some(postgresFixture.jdbcUrl)),
+      )
+    }
+  }
+}
diff --git a/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/H2FileSqlLedgerReaderWriterIntegrationSpec.scala b/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/H2FileSqlLedgerReaderWriterIntegrationSpec.scala
index 286b5f0827f0..a5ec33d995b8 100644
--- a/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/H2FileSqlLedgerReaderWriterIntegrationSpec.scala
+++ b/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/H2FileSqlLedgerReaderWriterIntegrationSpec.scala
@@ -21,27 +21,20 @@ class H2FileSqlLedgerReaderWriterIntegrationSpec
     extends ParticipantStateIntegrationSpecBase("SQL implementation using H2 with a file") {
   private implicit val ec: ExecutionContext = ExecutionContext.global
 
-  private var databaseFile: Path = _
-
   override val startIndex: Long = SqlLedgerReaderWriter.StartIndex
 
+  private var directory: Path = _
+
   override def beforeEach(): Unit = {
-    databaseFile = Files.createTempFile(getClass.getSimpleName, ".db")
+    directory = Files.createTempDirectory(getClass.getSimpleName)
     super.beforeEach()
   }
 
-  override def afterEach(): Unit = {
-    super.afterEach()
-    if (databaseFile != null) {
-      Files.delete(databaseFile)
-    }
-  }
-
   override def participantStateFactory(
       participantId: ParticipantId,
       ledgerId: LedgerString,
   ): ResourceOwner[ParticipantState] = {
-    val jdbcUrl = s"jdbc:h2:file:$databaseFile"
+    val jdbcUrl = s"jdbc:h2:file:$directory/test"
     newLoggingContext { implicit logCtx =>
       SqlLedgerReaderWriter
         .owner(ledgerId, participantId, jdbcUrl)
diff --git a/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/PostgresqlSqlLedgerReaderWriterIntegrationSpec.scala b/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/PostgresqlSqlLedgerReaderWriterIntegrationSpec.scala
new file mode 100644
index 000000000000..c71843595c1c
--- /dev/null
+++ b/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/PostgresqlSqlLedgerReaderWriterIntegrationSpec.scala
@@ -0,0 +1,53 @@
+// Copyright (c) 2020 The DAML Authors. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+package com.daml.ledger.on.sql
+
+import java.sql.DriverManager
+import java.time.Clock
+
+import com.daml.ledger.participant.state.kvutils.ParticipantStateIntegrationSpecBase
+import com.daml.ledger.participant.state.kvutils.ParticipantStateIntegrationSpecBase.ParticipantState
+import com.daml.ledger.participant.state.kvutils.api.KeyValueParticipantState
+import com.daml.ledger.participant.state.v1._
+import com.digitalasset.daml.lf.data.Ref.LedgerString
+import com.digitalasset.daml.lf.data.Time.Timestamp
+import com.digitalasset.logging.LoggingContext.newLoggingContext
+import com.digitalasset.resources.ResourceOwner
+import com.digitalasset.testing.postgresql.PostgresAroundAll
+
+import scala.concurrent.ExecutionContext
+
+class PostgresqlSqlLedgerReaderWriterIntegrationSpec
+    extends ParticipantStateIntegrationSpecBase("SQL implementation using PostgreSQL")
+    with PostgresAroundAll {
+  private implicit val ec: ExecutionContext = ExecutionContext.global
+
+  override val startIndex: Long = SqlLedgerReaderWriter.StartIndex
+
+  override def participantStateFactory(
+      participantId: ParticipantId,
+      ledgerId: LedgerString,
+  ): ResourceOwner[ParticipantState] = {
+    newLoggingContext { implicit logCtx =>
+      SqlLedgerReaderWriter
+        .owner(ledgerId, participantId, postgresFixture.jdbcUrl)
+        .map(readerWriter => new KeyValueParticipantState(readerWriter, readerWriter))
+    }
+  }
+
+  override protected def beforeEach(): Unit = {
+    super.beforeEach()
+    val connection = DriverManager.getConnection(postgresFixture.jdbcUrl)
+    try {
+      connection.prepareStatement("TRUNCATE log RESTART IDENTITY").execute()
+      connection.prepareStatement("TRUNCATE state RESTART IDENTITY").execute()
+      ()
+    } finally {
+      connection.close()
+    }
+  }
+
+  override def currentRecordTime(): Timestamp =
+    Timestamp.assertFromInstant(Clock.systemUTC().instant())
+}
diff --git a/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/SqliteFileSqlLedgerReaderWriterIntegrationSpec.scala b/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/SqliteFileSqlLedgerReaderWriterIntegrationSpec.scala
index b78a932d9fa1..55a3ec890e04 100644
--- a/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/SqliteFileSqlLedgerReaderWriterIntegrationSpec.scala
+++ b/ledger/ledger-on-sql/src/test/suite/scala/com/daml/ledger/on/sql/SqliteFileSqlLedgerReaderWriterIntegrationSpec.scala
@@ -21,27 +21,20 @@ class SqliteFileSqlLedgerReaderWriterIntegrationSpec
     extends ParticipantStateIntegrationSpecBase("SQL implementation using SQLite with a file") {
   private implicit val ec: ExecutionContext = ExecutionContext.global
 
-  private var databaseFile: Path = _
-
   override val startIndex: Long = SqlLedgerReaderWriter.StartIndex
 
+  private var directory: Path = _
+
   override def beforeEach(): Unit = {
-    databaseFile = Files.createTempFile(getClass.getSimpleName, ".db")
+    directory = Files.createTempDirectory(getClass.getSimpleName)
     super.beforeEach()
   }
 
-  override def afterEach(): Unit = {
-    super.afterEach()
-    if (databaseFile != null) {
-      Files.delete(databaseFile)
-    }
-  }
-
   override def participantStateFactory(
       participantId: ParticipantId,
       ledgerId: LedgerString,
   ): ResourceOwner[ParticipantState] = {
-    val jdbcUrl = s"jdbc:sqlite:$databaseFile"
+    val jdbcUrl = s"jdbc:sqlite:$directory/test.sqlite"
     newLoggingContext { implicit logCtx =>
       SqlLedgerReaderWriter
         .owner(ledgerId, participantId, jdbcUrl)
diff --git a/maven_install.json b/maven_install.json
index ca07bda9a142..01f8a3bb6eec 100644
--- a/maven_install.json
+++ b/maven_install.json
@@ -7891,26 +7891,26 @@
             "sha256": "326b66bf9a18727c59fdd77306cd2f22dfcf678049998b4f823dd29c632ccc0a"
         },
         {
-            "coord": "org.postgresql:postgresql:42.2.6",
-            "file": "v1/https/repo1.maven.org/maven2/org/postgresql/postgresql/42.2.6/postgresql-42.2.6.jar",
+            "coord": "org.postgresql:postgresql:42.2.9",
+            "file": "v1/https/repo1.maven.org/maven2/org/postgresql/postgresql/42.2.9/postgresql-42.2.9.jar",
             "directDependencies": [],
             "dependencies": [],
-            "url": "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.6/postgresql-42.2.6.jar",
+            "url": "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.9/postgresql-42.2.9.jar",
             "mirror_urls": [
-                "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.6/postgresql-42.2.6.jar"
+                "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.9/postgresql-42.2.9.jar"
             ],
-            "sha256": "07daadb33e87638703bf41f3307fc0dbdb386e54af5d5d6481511a36f50ca004"
+            "sha256": "2bd6cdf3a6a277135f74f9d138ba24d0bda15c3a79014093aedfa698cb6627da"
         },
         {
-            "coord": "org.postgresql:postgresql:jar:sources:42.2.6",
-            "file": "v1/https/repo1.maven.org/maven2/org/postgresql/postgresql/42.2.6/postgresql-42.2.6-sources.jar",
+            "coord": "org.postgresql:postgresql:jar:sources:42.2.9",
+            "file": "v1/https/repo1.maven.org/maven2/org/postgresql/postgresql/42.2.9/postgresql-42.2.9-sources.jar",
             "directDependencies": [],
            "dependencies": [],
-            "url": "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.6/postgresql-42.2.6-sources.jar",
+            "url": "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.9/postgresql-42.2.9-sources.jar",
             "mirror_urls": [
-                "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.6/postgresql-42.2.6-sources.jar"
+                "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.9/postgresql-42.2.9-sources.jar"
             ],
-            "sha256": "0d7f02c19f88d42045111ea8352428b1074723f1f756e72989850351bdfeabea"
+            "sha256": "26a725949e684640deb9c4d9b6c3e74bc452b4a5b761030737dafacd28270095"
         },
         {
             "coord": "org.reactivestreams:reactive-streams-examples:1.0.2",
@@ -14155,14 +14155,14 @@
             "file": "v1/https/repo1.maven.org/maven2/org/tpolecat/doobie-postgres_2.12/0.6.0/doobie-postgres_2.12-0.6.0.jar",
             "directDependencies": [
                 "co.fs2:fs2-io_2.12:1.0.0",
-                "org.postgresql:postgresql:42.2.6",
+                "org.postgresql:postgresql:42.2.9",
                 "org.scala-lang:scala-library:2.12.10",
                 "org.tpolecat:doobie-core_2.12:0.6.0"
             ],
             "dependencies": [
                 "org.tpolecat:doobie-free_2.12:0.6.0",
-                "org.postgresql:postgresql:42.2.6",
                 "org.tpolecat:doobie-core_2.12:0.6.0",
+                "org.postgresql:postgresql:42.2.9",
                 "org.scala-lang:scala-reflect:2.12.8",
                 "org.typelevel:macro-compat_2.12:1.1.1",
                 "org.typelevel:cats-core_2.12:1.4.0",
@@ -14189,7 +14189,7 @@
             "file": "v1/https/repo1.maven.org/maven2/org/tpolecat/doobie-postgres_2.12/0.6.0/doobie-postgres_2.12-0.6.0-sources.jar",
             "directDependencies": [
                 "co.fs2:fs2-io_2.12:jar:sources:1.0.0",
-                "org.postgresql:postgresql:jar:sources:42.2.6",
+                "org.postgresql:postgresql:jar:sources:42.2.9",
                 "org.scala-lang:scala-library:jar:sources:2.12.10",
                 "org.tpolecat:doobie-core_2.12:jar:sources:0.6.0"
             ],
@@ -14200,9 +14200,9 @@
                 "org.tpolecat:doobie-core_2.12:jar:sources:0.6.0",
                 "org.typelevel:cats-kernel_2.12:jar:sources:1.4.0",
                 "co.fs2:fs2-io_2.12:jar:sources:1.0.0",
+                "org.postgresql:postgresql:jar:sources:42.2.9",
                 "com.chuusai:shapeless_2.12:jar:sources:2.3.2",
                 "co.fs2:fs2-core_2.12:jar:sources:1.0.0",
-                "org.postgresql:postgresql:jar:sources:42.2.6",
                 "org.typelevel:machinist_2.12:jar:sources:0.6.5",
                 "org.typelevel:cats-macros_2.12:jar:sources:1.4.0",
                 "com.lihaoyi:sourcecode_2.12:jar:sources:0.1.7",
@@ -14549,26 +14549,26 @@
             "sha256": "e15236770b2409de1fac08767b99fa2e7f33b6e71a9824d52dba5cbf19053e7e"
         },
         {
-            "coord": "org.xerial:sqlite-jdbc:3.25.2",
-            "file": "v1/https/repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.25.2/sqlite-jdbc-3.25.2.jar",
+            "coord": "org.xerial:sqlite-jdbc:3.30.1",
+            "file": "v1/https/repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.30.1/sqlite-jdbc-3.30.1.jar",
             "directDependencies": [],
             "dependencies": [],
-            "url": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.25.2/sqlite-jdbc-3.25.2.jar",
+            "url": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.30.1/sqlite-jdbc-3.30.1.jar",
             "mirror_urls": [
-                "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.25.2/sqlite-jdbc-3.25.2.jar"
+                "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.30.1/sqlite-jdbc-3.30.1.jar"
             ],
-            "sha256": "a45da61abed61568a533fdece125093180828edeb0d4b6f6d572e0cf457465f6"
+            "sha256": "280034a8993000104c5b36bc5e1139b0e82df1de9cfd951f529bdadeaf6f456d"
         },
         {
-            "coord": "org.xerial:sqlite-jdbc:jar:sources:3.25.2",
-            "file": "v1/https/repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.25.2/sqlite-jdbc-3.25.2-sources.jar",
+            "coord": "org.xerial:sqlite-jdbc:jar:sources:3.30.1",
+            "file": "v1/https/repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.30.1/sqlite-jdbc-3.30.1-sources.jar",
             "directDependencies": [],
             "dependencies": [],
-            "url": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.25.2/sqlite-jdbc-3.25.2-sources.jar",
+            "url": "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.30.1/sqlite-jdbc-3.30.1-sources.jar",
             "mirror_urls": [
-                "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.25.2/sqlite-jdbc-3.25.2-sources.jar"
+                "https://repo1.maven.org/maven2/org/xerial/sqlite-jdbc/3.30.1/sqlite-jdbc-3.30.1-sources.jar"
             ],
-            "sha256": "58073a1103ffa769b2511c60f88c647a922ab0f6ab621625f9ade223064f89e3"
+            "sha256": "39f86de859df0a19972014e95e6aa1fe0eb5d7677ce8694f719c0d3586d8c8b6"
         },
         {
             "coord": "org.yaml:snakeyaml:1.24",
@@ -14688,6 +14688,6 @@
             }
         ],
         "version": "0.1.0",
-        "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": -537014546
+        "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": 1501980171
    }
}