diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/backend/DBDTOV1.scala b/ledger/participant-integration-api/src/main/scala/platform/store/backend/DBDTOV1.scala index c4d2c27e75d2..7f865161f9f0 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/backend/DBDTOV1.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/backend/DBDTOV1.scala @@ -5,11 +5,16 @@ package com.daml.platform.store.backend import java.time.Instant -trait DBDTOV1 +import com.daml.scalautil.NeverEqualsOverride + +sealed trait DBDTOV1 + extends NeverEqualsOverride + with Product + with Serializable // to aid type inference for case class implementors object DBDTOV1 { - case class EventDivulgence( + final case class EventDivulgence( event_offset: Option[String], command_id: Option[String], workflow_id: Option[String], @@ -23,7 +28,7 @@ object DBDTOV1 { event_sequential_id: Long, ) extends DBDTOV1 - case class EventCreate( + final case class EventCreate( event_offset: Option[String], transaction_id: Option[String], ledger_effective_time: Option[Instant], @@ -48,7 +53,7 @@ object DBDTOV1 { event_sequential_id: Long, ) extends DBDTOV1 - case class EventExercise( + final case class EventExercise( consuming: Boolean, event_offset: Option[String], transaction_id: Option[String], @@ -75,67 +80,63 @@ object DBDTOV1 { event_sequential_id: Long, ) extends DBDTOV1 - // TODO append-only: wartremover complained about having Array-s in case classes. I would prefer case classes. can we work that somehow around? Similarly in other DTO cases... - // TODO append-only: there are some options: - // - mixing in SomeArrayEquals if we need array equality for some reason: would be proper if we move SomeArrayEquals out from speedy codebase to scalalib first. 
- // - spawning somewhere something like trait NeverEqualsOverride { override equals(o: Object): Boolean = false }, and mixing in these classes - class ConfigurationEntry( - val ledger_offset: String, - val recorded_at: Instant, - val submission_id: String, - val typ: String, - val configuration: Array[Byte], - val rejection_reason: Option[String], + final case class ConfigurationEntry( + ledger_offset: String, + recorded_at: Instant, + submission_id: String, + typ: String, + configuration: Array[Byte], + rejection_reason: Option[String], ) extends DBDTOV1 - class PackageEntry( - val ledger_offset: String, - val recorded_at: Instant, - val submission_id: Option[String], - val typ: String, - val rejection_reason: Option[String], + final case class PackageEntry( + ledger_offset: String, + recorded_at: Instant, + submission_id: Option[String], + typ: String, + rejection_reason: Option[String], ) extends DBDTOV1 - class Package( - val package_id: String, - val upload_id: String, - val source_description: Option[String], - val size: Long, - val known_since: Instant, - val ledger_offset: String, - val _package: Array[Byte], + final case class Package( + package_id: String, + upload_id: String, + source_description: Option[String], + size: Long, + known_since: Instant, + ledger_offset: String, + _package: Array[Byte], ) extends DBDTOV1 - class PartyEntry( - val ledger_offset: String, - val recorded_at: Instant, - val submission_id: Option[String], - val party: Option[String], - val display_name: Option[String], - val typ: String, - val rejection_reason: Option[String], - val is_local: Option[Boolean], + final case class PartyEntry( + ledger_offset: String, + recorded_at: Instant, + submission_id: Option[String], + party: Option[String], + display_name: Option[String], + typ: String, + rejection_reason: Option[String], + is_local: Option[Boolean], ) extends DBDTOV1 - class Party( - val party: String, - val display_name: Option[String], - val explicit: Boolean, - val ledger_offset: Option[String], - val is_local: Boolean, + final case class Party( + party: String, + display_name: Option[String], + explicit: Boolean, + ledger_offset: Option[String], + is_local: Boolean, ) extends DBDTOV1 - class CommandCompletion( - val completion_offset: String, - val record_time: Instant, - val application_id: String, - val submitters: Set[String], - val command_id: String, - val transaction_id: Option[String], - val status_code: Option[Int], - val status_message: Option[String], + final case class CommandCompletion( + completion_offset: String, + record_time: Instant, + application_id: String, + submitters: Set[String], + command_id: String, + transaction_id: Option[String], + status_code: Option[Int], + status_message: Option[String], ) extends DBDTOV1 - class CommandDeduplication(val deduplication_key: String) extends DBDTOV1 + final case class CommandDeduplication(deduplication_key: String) extends DBDTOV1 } diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/backend/StorageBackend.scala b/ledger/participant-integration-api/src/main/scala/platform/store/backend/StorageBackend.scala index b8e1e3e86334..f923900ccd74 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/backend/StorageBackend.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/backend/StorageBackend.scala @@ -9,12 +9,51 @@ import com.daml.ledger.participant.state.v1.Offset import com.daml.platform.store.DbType import 
com.daml.platform.store.backend.postgresql.PostgresStorageBackend -// TODO append-only: add detailed scaladoc +/** The interface that hides the database-technology-specific implementation of parallel ingestion. + * + * @tparam DB_BATCH Since parallel ingestion also involves batching, this implementation-specific type separates the CPU-intensive batching operation from the purely IO-bound insertBatch operation. + */ trait StorageBackend[DB_BATCH] { + + /** The CPU-intensive batching operation: encapsulates the batching logic and the mapping to the database-specific representation of the inserted data. + * This should be pure CPU logic, without IO. + * + * @param dbDtos the collection of DBDTOV1 from which the batch is formed + * @return the database-specific batch DTO, which can be inserted via insertBatch + */ def batch(dbDtos: Vector[DBDTOV1]): DB_BATCH + + /** Inserts a batch into the database using a JDBC connection. + * No significant CPU load; mostly blocking JDBC communication with the database backend. + * + * @param connection to be used when inserting the batch + * @param batch to be inserted + */ def insertBatch(connection: Connection, batch: DB_BATCH): Unit + + /** Updates the parameters table: sets the new observable ledger end and other parameters. + * No significant CPU load; mostly blocking JDBC communication with the database backend. + * + * @param connection to be used when updating the parameters table + * @param params the parameters + */ def updateParams(connection: Connection, params: StorageBackend.Params): Unit + + /** Custom initialization code to run before an ingestion starts. + * This method is responsible for recovery after a possibly non-graceful stop of a previous indexing run. + * No significant CPU load; mostly blocking JDBC communication with the database backend. + * + * @param connection to be used when initializing + * @return the LedgerEnd, which should be the basis for further indexing. + */ def initialize(connection: Connection): StorageBackend.LedgerEnd + + /** Queries the ledger end, as read from the parameters table. + * No significant CPU load; mostly blocking JDBC communication with the database backend. 
+ * + * @param connection to be used to get the LedgerEnd + * @return the LedgerEnd, which should be the basis for further indexing + */ def ledgerEnd(connection: Connection): StorageBackend.LedgerEnd } diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/backend/UpdateToDBDTOV1.scala b/ledger/participant-integration-api/src/main/scala/platform/store/backend/UpdateToDBDTOV1.scala index 4a9de1014e35..29ae6c7c74f6 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/backend/UpdateToDBDTOV1.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/backend/UpdateToDBDTOV1.scala @@ -14,7 +14,6 @@ import com.daml.platform.store.appendonlydao.JdbcLedgerDao import com.daml.platform.store.appendonlydao.events._ import com.daml.platform.store.dao.DeduplicationKeyMaker -// TODO append-only: target to separation per update-type to it's own function + unit tests object UpdateToDBDTOV1 { def apply( @@ -24,9 +23,8 @@ object UpdateToDBDTOV1 { ): Offset => Update => Iterator[DBDTOV1] = { offset => { case u: Update.CommandRejected => - // TODO append-only: we might want to tune up deduplications so it is also a temporal query Iterator( - new DBDTOV1.CommandCompletion( + DBDTOV1.CommandCompletion( completion_offset = offset.toHexString, record_time = u.recordTime.toInstant, application_id = u.submitterInfo.applicationId, @@ -36,7 +34,7 @@ object UpdateToDBDTOV1 { status_code = Some(Conversions.participantRejectionReasonToErrorCode(u.reason).value()), status_message = Some(u.reason.description), ), - new DBDTOV1.CommandDeduplication( + DBDTOV1.CommandDeduplication( DeduplicationKeyMaker.make( domain.CommandId(u.submitterInfo.commandId), u.submitterInfo.actAs, @@ -46,7 +44,7 @@ object UpdateToDBDTOV1 { case u: Update.ConfigurationChanged => Iterator( - new DBDTOV1.ConfigurationEntry( + DBDTOV1.ConfigurationEntry( ledger_offset = offset.toHexString, recorded_at = u.recordTime.toInstant, submission_id = u.submissionId, @@ -58,7 +56,7 @@ object UpdateToDBDTOV1 { case u: Update.ConfigurationChangeRejected => Iterator( - new DBDTOV1.ConfigurationEntry( + DBDTOV1.ConfigurationEntry( ledger_offset = offset.toHexString, recorded_at = u.recordTime.toInstant, submission_id = u.submissionId, @@ -70,7 +68,7 @@ object UpdateToDBDTOV1 { case u: Update.PartyAddedToParticipant => Iterator( - new DBDTOV1.PartyEntry( + DBDTOV1.PartyEntry( ledger_offset = offset.toHexString, recorded_at = u.recordTime.toInstant, submission_id = u.submissionId, @@ -80,7 +78,7 @@ object UpdateToDBDTOV1 { rejection_reason = None, is_local = Some(u.participantId == participantId), ), - new DBDTOV1.Party( + DBDTOV1.Party( party = u.party, display_name = Some(u.displayName), explicit = true, @@ -91,7 +89,7 @@ object UpdateToDBDTOV1 { case u: Update.PartyAllocationRejected => Iterator( - new DBDTOV1.PartyEntry( + DBDTOV1.PartyEntry( ledger_offset = offset.toHexString, recorded_at = u.recordTime.toInstant, submission_id = Some(u.submissionId), @@ -106,7 +104,7 @@ object UpdateToDBDTOV1 { case u: Update.PublicPackageUpload => val uploadId = u.submissionId.getOrElse(UUID.randomUUID().toString) val packages = u.archives.iterator.map { archive => - new DBDTOV1.Package( + DBDTOV1.Package( package_id = archive.getHash, upload_id = uploadId, source_description = u.sourceDescription, @@ -117,7 +115,7 @@ object UpdateToDBDTOV1 { ) } val packageEntries = u.submissionId.iterator.map(submissionId => - new DBDTOV1.PackageEntry( + DBDTOV1.PackageEntry( ledger_offset = offset.toHexString, recorded_at 
= u.recordTime.toInstant, submission_id = Some(submissionId), @@ -129,7 +127,7 @@ object UpdateToDBDTOV1 { case u: Update.PublicPackageUploadRejected => Iterator( - new DBDTOV1.PackageEntry( + DBDTOV1.PackageEntry( ledger_offset = offset.toHexString, recorded_at = u.recordTime.toInstant, submission_id = Some(u.submissionId), @@ -261,7 +259,7 @@ object UpdateToDBDTOV1 { } val completions = u.optSubmitterInfo.iterator.map { submitterInfo => - new DBDTOV1.CommandCompletion( + DBDTOV1.CommandCompletion( completion_offset = offset.toHexString, record_time = u.recordTime.toInstant, application_id = submitterInfo.applicationId, diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/backend/postgresql/RawDBBatchPostgreSQLV1.scala b/ledger/participant-integration-api/src/main/scala/platform/store/backend/postgresql/RawDBBatchPostgreSQLV1.scala index 354ad1d26795..0bf734417112 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/backend/postgresql/RawDBBatchPostgreSQLV1.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/backend/postgresql/RawDBBatchPostgreSQLV1.scala @@ -7,6 +7,7 @@ import java.time.format.DateTimeFormatter import java.time.{Instant, ZoneOffset} import com.daml.platform.store.backend.DBDTOV1 +import com.daml.scalautil.NeverEqualsOverride import scala.collection.mutable @@ -31,129 +32,129 @@ case class RawDBBatchPostgreSQLV1( commandDeduplicationBatch: Option[CommandDeduplicationBatch], ) -class EventsBatchDivulgence( - val event_offset: Array[String], - val command_id: Array[String], - val workflow_id: Array[String], - val application_id: Array[String], - val submitters: Array[String], // '|' separated list - val contract_id: Array[String], - val template_id: Array[String], - val tree_event_witnesses: Array[String], // '|' separated list - val create_argument: Array[Array[Byte]], - val event_sequential_id: Array[Long], - val create_argument_compression: Array[java.lang.Integer], -) - -class EventsBatchCreate( - val event_offset: Array[String], - val transaction_id: Array[String], - val ledger_effective_time: Array[String], // timestamp - val command_id: Array[String], - val workflow_id: Array[String], - val application_id: Array[String], - val submitters: Array[String], // '|' separated list - val node_index: Array[java.lang.Integer], - val event_id: Array[String], - val contract_id: Array[String], - val template_id: Array[String], - val flat_event_witnesses: Array[String], // '|' separated list - val tree_event_witnesses: Array[String], // '|' separated list - val create_argument: Array[Array[Byte]], - val create_signatories: Array[String], // '|' separated list - val create_observers: Array[String], // '|' separated list - val create_agreement_text: Array[String], - val create_key_value: Array[Array[Byte]], - val create_key_hash: Array[String], - val event_sequential_id: Array[Long], - val create_argument_compression: Array[java.lang.Integer], - val create_key_value_compression: Array[java.lang.Integer], -) - -class EventsBatchExercise( - val event_offset: Array[String], - val transaction_id: Array[String], - val ledger_effective_time: Array[String], // timestamp - val command_id: Array[String], - val workflow_id: Array[String], - val application_id: Array[String], - val submitters: Array[String], // '|' separated list - val node_index: Array[java.lang.Integer], - val event_id: Array[String], - val contract_id: Array[String], - val template_id: Array[String], - val flat_event_witnesses: Array[String], // '|' 
separated list - val tree_event_witnesses: Array[String], // '|' separated list - val create_key_value: Array[Array[Byte]], - val exercise_choice: Array[String], - val exercise_argument: Array[Array[Byte]], - val exercise_result: Array[Array[Byte]], - val exercise_actors: Array[String], // '|' separated list - val exercise_child_event_ids: Array[String], // '|' separated list - val event_sequential_id: Array[Long], - val create_key_value_compression: Array[java.lang.Integer], - val exercise_argument_compression: Array[java.lang.Integer], - val exercise_result_compression: Array[java.lang.Integer], -) - -class ConfigurationEntriesBatch( - val ledger_offset: Array[String], - val recorded_at: Array[String], // timestamp - val submission_id: Array[String], - val typ: Array[String], - val configuration: Array[Array[Byte]], - val rejection_reason: Array[String], -) - -class PackageEntriesBatch( - val ledger_offset: Array[String], - val recorded_at: Array[String], // timestamp - val submission_id: Array[String], - val typ: Array[String], - val rejection_reason: Array[String], -) - -class PackagesBatch( - val package_id: Array[String], - val upload_id: Array[String], - val source_description: Array[String], - val size: Array[Long], - val known_since: Array[String], // timestamp - val ledger_offset: Array[String], - val _package: Array[Array[Byte]], -) - -class PartiesBatch( - val party: Array[String], - val display_name: Array[String], - val explicit: Array[Boolean], - val ledger_offset: Array[String], - val is_local: Array[Boolean], -) - -class PartyEntriesBatch( - val ledger_offset: Array[String], - val recorded_at: Array[String], // timestamp - val submission_id: Array[String], - val party: Array[String], - val display_name: Array[String], - val typ: Array[String], - val rejection_reason: Array[String], - val is_local: Array[java.lang.Boolean], -) - -class CommandCompletionsBatch( - val completion_offset: Array[String], - val record_time: Array[String], // timestamp - val application_id: Array[String], - val submitters: Array[String], // '|' separated list - val command_id: Array[String], - val transaction_id: Array[String], - val status_code: Array[java.lang.Integer], - val status_message: Array[String], -) - -class CommandDeduplicationBatch(val deduplication_key: Array[String]) +case class EventsBatchDivulgence( + event_offset: Array[String], + command_id: Array[String], + workflow_id: Array[String], + application_id: Array[String], + submitters: Array[String], // '|' separated list + contract_id: Array[String], + template_id: Array[String], + tree_event_witnesses: Array[String], // '|' separated list + create_argument: Array[Array[Byte]], + event_sequential_id: Array[Long], + create_argument_compression: Array[java.lang.Integer], +) extends NeverEqualsOverride + +case class EventsBatchCreate( + event_offset: Array[String], + transaction_id: Array[String], + ledger_effective_time: Array[String], // timestamp + command_id: Array[String], + workflow_id: Array[String], + application_id: Array[String], + submitters: Array[String], // '|' separated list + node_index: Array[java.lang.Integer], + event_id: Array[String], + contract_id: Array[String], + template_id: Array[String], + flat_event_witnesses: Array[String], // '|' separated list + tree_event_witnesses: Array[String], // '|' separated list + create_argument: Array[Array[Byte]], + create_signatories: Array[String], // '|' separated list + create_observers: Array[String], // '|' separated list + create_agreement_text: Array[String], + 
create_key_value: Array[Array[Byte]], + create_key_hash: Array[String], + event_sequential_id: Array[Long], + create_argument_compression: Array[java.lang.Integer], + create_key_value_compression: Array[java.lang.Integer], +) extends NeverEqualsOverride + +case class EventsBatchExercise( + event_offset: Array[String], + transaction_id: Array[String], + ledger_effective_time: Array[String], // timestamp + command_id: Array[String], + workflow_id: Array[String], + application_id: Array[String], + submitters: Array[String], // '|' separated list + node_index: Array[java.lang.Integer], + event_id: Array[String], + contract_id: Array[String], + template_id: Array[String], + flat_event_witnesses: Array[String], // '|' separated list + tree_event_witnesses: Array[String], // '|' separated list + create_key_value: Array[Array[Byte]], + exercise_choice: Array[String], + exercise_argument: Array[Array[Byte]], + exercise_result: Array[Array[Byte]], + exercise_actors: Array[String], // '|' separated list + exercise_child_event_ids: Array[String], // '|' separated list + event_sequential_id: Array[Long], + create_key_value_compression: Array[java.lang.Integer], + exercise_argument_compression: Array[java.lang.Integer], + exercise_result_compression: Array[java.lang.Integer], +) extends NeverEqualsOverride + +case class ConfigurationEntriesBatch( + ledger_offset: Array[String], + recorded_at: Array[String], // timestamp + submission_id: Array[String], + typ: Array[String], + configuration: Array[Array[Byte]], + rejection_reason: Array[String], +) extends NeverEqualsOverride + +case class PackageEntriesBatch( + ledger_offset: Array[String], + recorded_at: Array[String], // timestamp + submission_id: Array[String], + typ: Array[String], + rejection_reason: Array[String], +) extends NeverEqualsOverride + +case class PackagesBatch( + package_id: Array[String], + upload_id: Array[String], + source_description: Array[String], + size: Array[Long], + known_since: Array[String], // timestamp + ledger_offset: Array[String], + _package: Array[Array[Byte]], +) extends NeverEqualsOverride + +case class PartiesBatch( + party: Array[String], + display_name: Array[String], + explicit: Array[Boolean], + ledger_offset: Array[String], + is_local: Array[Boolean], +) extends NeverEqualsOverride + +case class PartyEntriesBatch( + ledger_offset: Array[String], + recorded_at: Array[String], // timestamp + submission_id: Array[String], + party: Array[String], + display_name: Array[String], + typ: Array[String], + rejection_reason: Array[String], + is_local: Array[java.lang.Boolean], +) extends NeverEqualsOverride + +case class CommandCompletionsBatch( + completion_offset: Array[String], + record_time: Array[String], // timestamp + application_id: Array[String], + submitters: Array[String], // '|' separated list + command_id: Array[String], + transaction_id: Array[String], + status_code: Array[java.lang.Integer], + status_message: Array[String], +) extends NeverEqualsOverride + +case class CommandDeduplicationBatch(deduplication_key: Array[String]) extends NeverEqualsOverride object RawDBBatchPostgreSQLV1 { @@ -507,7 +508,7 @@ object RawDBBatchPostgreSQLV1 { def build(): RawDBBatchPostgreSQLV1 = RawDBBatchPostgreSQLV1( eventsBatchDivulgence = Option(eventsBatchBuilderDivulgence).map(b => - new EventsBatchDivulgence( + EventsBatchDivulgence( event_offset = b.event_offset.result(), command_id = b.command_id.result(), workflow_id = b.workflow_id.result(), @@ -522,7 +523,7 @@ object RawDBBatchPostgreSQLV1 { ) ), eventsBatchCreate = 
Option(eventsBatchBuilderCreate).map(b => - new EventsBatchCreate( + EventsBatchCreate( event_offset = b.event_offset.result(), transaction_id = b.transaction_id.result(), ledger_effective_time = b.ledger_effective_time.result(), @@ -548,7 +549,7 @@ object RawDBBatchPostgreSQLV1 { ) ), eventsBatchConsumingExercise = Option(eventsBatchBuilderConsumingExercise).map(b => - new EventsBatchExercise( + EventsBatchExercise( event_offset = b.event_offset.result(), transaction_id = b.transaction_id.result(), ledger_effective_time = b.ledger_effective_time.result(), @@ -575,7 +576,7 @@ object RawDBBatchPostgreSQLV1 { ) ), eventsBatchNonConsumingExercise = Option(eventsBatchBuilderNonConsumingExercise).map(b => - new EventsBatchExercise( + EventsBatchExercise( event_offset = b.event_offset.result(), transaction_id = b.transaction_id.result(), ledger_effective_time = b.ledger_effective_time.result(), @@ -602,7 +603,7 @@ object RawDBBatchPostgreSQLV1 { ) ), configurationEntriesBatch = Option(configurationEntriesBatchBuilder).map(b => - new ConfigurationEntriesBatch( + ConfigurationEntriesBatch( ledger_offset = b.ledger_offset.result(), recorded_at = b.recorded_at.result(), submission_id = b.submission_id.result(), @@ -612,7 +613,7 @@ object RawDBBatchPostgreSQLV1 { ) ), packageEntriesBatch = Option(packageEntriesBatchBuilder).map(b => - new PackageEntriesBatch( + PackageEntriesBatch( ledger_offset = b.ledger_offset.result(), recorded_at = b.recorded_at.result(), submission_id = b.submission_id.result(), @@ -621,7 +622,7 @@ object RawDBBatchPostgreSQLV1 { ) ), packagesBatch = Option(packagesBatchBuilder).map(b => - new PackagesBatch( + PackagesBatch( package_id = b.package_id.result(), upload_id = b.upload_id.result(), source_description = b.source_description.result(), @@ -632,7 +633,7 @@ object RawDBBatchPostgreSQLV1 { ) ), partiesBatch = Option(partiesBatchBuilder).map(b => - new PartiesBatch( + PartiesBatch( party = b.party.result(), display_name = b.display_name.result(), explicit = b.explicit.result(), @@ -641,7 +642,7 @@ object RawDBBatchPostgreSQLV1 { ) ), partyEntriesBatch = Option(partyEntriesBatchBuilder).map(b => - new PartyEntriesBatch( + PartyEntriesBatch( ledger_offset = b.ledger_offset.result(), recorded_at = b.recorded_at.result(), submission_id = b.submission_id.result(), @@ -653,7 +654,7 @@ object RawDBBatchPostgreSQLV1 { ) ), commandCompletionsBatch = Option(commandCompletionsBatchBuilder).map(b => - new CommandCompletionsBatch( + CommandCompletionsBatch( completion_offset = b.completion_offset.result(), record_time = b.record_time.result(), application_id = b.application_id.result(), @@ -665,7 +666,7 @@ object RawDBBatchPostgreSQLV1 { ) ), commandDeduplicationBatch = Option(commandDeduplicationBatchBuilder).map(b => - new CommandDeduplicationBatch( + CommandDeduplicationBatch( deduplication_key = b.deduplication_key.result() ) ), diff --git a/libs-scala/scala-utils/src/main/scala/com/daml/scalautil/NeverEqualsOverride.scala b/libs-scala/scala-utils/src/main/scala/com/daml/scalautil/NeverEqualsOverride.scala new file mode 100644 index 000000000000..ad98c3f81df3 --- /dev/null +++ b/libs-scala/scala-utils/src/main/scala/com/daml/scalautil/NeverEqualsOverride.scala @@ -0,0 +1,15 @@ +// Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +package com.daml.scalautil + +/** This trait is useful to mix in when: + * - equality semantics for the type are not an issue, and + * - the class has fields of non-comparable types. + * + * Practical usage: mix it into the top trait of a DTO ADT whose case class implementors have Array fields; this disables equality semantics and prevents Wartremover warnings. + */ +trait NeverEqualsOverride extends Equals { + override final def canEqual(o: Any) = false + override final def equals(o: Any): Boolean = false +}
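
Editor's note: a minimal, illustrative sketch (not part of the change set) of what mixing NeverEqualsOverride into an Array-carrying DTO case class buys; ExampleDto, its fields, and the demo object are invented for illustration only.

import com.daml.scalautil.NeverEqualsOverride

// Invented DTO mirroring the DBDTOV1 pattern: a case class with an Array field,
// the kind of member the removed TODO says Wartremover complained about.
final case class ExampleDto(id: String, payload: Array[Byte]) extends NeverEqualsOverride

object NeverEqualsOverrideDemo extends App {
  val a = ExampleDto("x", Array[Byte](1, 2, 3))
  val b = ExampleDto("x", Array[Byte](1, 2, 3))

  // The mixin's final equals suppresses the compiler-generated case class equality,
  // so no Array comparison ever happens and comparisons are simply always false.
  println(a == b) // false
  println(a == a) // false as well: equality is disabled entirely, not made structural
}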
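
Editor's note: a hedged usage sketch of the StorageBackend interface documented above, showing the intended division of labour (CPU-bound batch, IO-bound insertBatch/updateParams, initialize for recovery). The object, method, and parameter names below (IngestionSketch, ingest, updates, paramsFor) are invented for illustration; the actual parallel ingestion pipeline is outside this diff.

import java.sql.Connection

import com.daml.platform.store.backend.{DBDTOV1, StorageBackend}

object IngestionSketch {

  // Drives one storage backend over a stream of already-grouped DBDTOV1 batches.
  def ingest[DB_BATCH](
      backend: StorageBackend[DB_BATCH],
      connection: Connection, // used for all the IO-bound, blocking JDBC calls
      updates: Iterator[Vector[DBDTOV1]], // DBDTOV1-s already grouped into ingestion batches
      paramsFor: Vector[DBDTOV1] => StorageBackend.Params, // derives the new observable ledger end
  ): StorageBackend.LedgerEnd = {
    // Recover after a possibly non-graceful stop; in the real pipeline the returned
    // LedgerEnd would determine from which offset the updates stream resumes.
    backend.initialize(connection)

    updates.foreach { dbDtos =>
      // CPU-intensive, no IO: map the DBDTOV1-s to the database-specific batch representation.
      val batch = backend.batch(dbDtos)
      // IO-bound: insert the batch, then advance the observable ledger end.
      backend.insertBatch(connection, batch)
      backend.updateParams(connection, paramsFor(dbDtos))
    }

    // Read back the ledger end now recorded in the parameters table.
    backend.ledgerEnd(connection)
  }
}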