diff --git a/bazel-java-deps.bzl b/bazel-java-deps.bzl index fdb80c60d4ae..451017e2d1c8 100644 --- a/bazel-java-deps.bzl +++ b/bazel-java-deps.bzl @@ -63,6 +63,7 @@ def install_java_deps(): "commons-io:commons-io:2.5", "com.oracle.database.jdbc:ojdbc8:19.8.0.0", "com.sparkjava:spark-core:2.9.1", + "com.oracle.database.jdbc.debug:ojdbc8_g:19.8.0.0", "com.squareup:javapoet:1.11.1", "com.storm-enroute:scalameter_{}:0.19".format(scala_major_version), "com.storm-enroute:scalameter-core_{}:0.19".format(scala_major_version), diff --git a/ledger/participant-integration-api/BUILD.bazel b/ledger/participant-integration-api/BUILD.bazel index 20fac547d329..d1c249d7321a 100644 --- a/ledger/participant-integration-api/BUILD.bazel +++ b/ledger/participant-integration-api/BUILD.bazel @@ -61,7 +61,6 @@ compile_deps = [ "@maven//:io_opentelemetry_opentelemetry_context", "@maven//:org_slf4j_slf4j_api", # this oracle import is problematic for daml assistant build - "@maven//:com_oracle_database_jdbc_ojdbc8", ] scala_compile_deps = [ @@ -73,6 +72,7 @@ scala_compile_deps = [ "@maven//:org_scala_lang_modules_scala_collection_compat", "@maven//:org_scala_lang_modules_scala_java8_compat", "@maven//:org_scalaz_scalaz_core", + "@maven//:io_spray_spray_json", ] runtime_deps = [ @@ -148,7 +148,6 @@ da_scala_library( visibility = ["//visibility:public"], runtime_deps = [ "@maven//:com_h2database_h2", - "@maven//:com_oracle_database_jdbc_ojdbc8", "@maven//:org_postgresql_postgresql", ], deps = [ @@ -307,7 +306,6 @@ da_scala_test_suite( ], tags = [] if oracle_testing else ["manual"], runtime_deps = [ - "@maven//:com_oracle_database_jdbc_ojdbc8", ], deps = [ ":participant-integration-api", diff --git a/ledger/participant-integration-api/src/main/resources/db/migration/oracle/V1__Init.sql b/ledger/participant-integration-api/src/main/resources/db/migration/oracle/V1__Init.sql index 920f7d66a185..65fa29c3be48 100644 --- a/ledger/participant-integration-api/src/main/resources/db/migration/oracle/V1__Init.sql +++ b/ledger/participant-integration-api/src/main/resources/db/migration/oracle/V1__Init.sql @@ -7,18 +7,6 @@ -- subsequently dropped by V30__ --- custom array of varchar2 type used by several columns across tables --- declaring upfront so this type is defined globally -create type VARCHAR_ARRAY as VARRAY (32767) OF VARCHAR2(4000); -/ -create type SMALLINT_ARRAY as VARRAY (32767) of SMALLINT; -/ -create type BYTE_ARRAY_ARRAY as VARRAY (32767) of RAW(2000); -/ -create type TIMESTAMP_ARRAY as VARRAY (32767) of TIMESTAMP; -/ -create type BOOLEAN_ARRAY as VARRAY (32767) of NUMBER(1, 0); -/ CREATE TABLE parameters -- this table is meant to have a single row storing all the parameters we have ( @@ -193,7 +181,7 @@ CREATE TABLE participant_command_completions record_time TIMESTAMP not null, application_id NVARCHAR2(1000) not null, - submitters VARCHAR_ARRAY not null, + submitters CLOB NOT NULL CONSTRAINT ensure_json_submitters CHECK (submitters IS JSON), command_id NVARCHAR2(1000) not null, transaction_id NVARCHAR2(1000), -- null if the command was rejected and checkpoints @@ -201,7 +189,6 @@ CREATE TABLE participant_command_completions status_message NVARCHAR2(1000) -- null for successful command and checkpoints ); --- TODO https://github.com/digital-asset/daml/issues/9493 create index participant_command_completions_idx on participant_command_completions(completion_offset, application_id); --------------------------------------------------------------------------------------------------- @@ -233,12 +220,12 @@ create table 
participant_events command_id VARCHAR2(1000), workflow_id VARCHAR2(1000), -- null unless provided by a Ledger API call application_id VARCHAR2(1000), - submitters VARCHAR_ARRAY, + submitters CLOB NOT NULL CONSTRAINT ensure_json_participant_submitters CHECK (submitters IS JSON), -- non-null iff this event is a create create_argument BLOB, - create_signatories VARCHAR_ARRAY, - create_observers VARCHAR_ARRAY, + create_signatories CLOB NOT NULL CONSTRAINT ensure_json_participant_create_signatories CHECK (create_signatories IS JSON), + create_observers CLOB NOT NULL CONSTRAINT ensure_json_participant_create_observers CHECK (create_observers IS JSON), create_agreement_text VARCHAR2(1000), -- null if agreement text is not provided create_consumed_at VARCHAR2(4000), -- null if the contract created by this event is active create_key_value BLOB, -- null if the contract created by this event has no key @@ -248,11 +235,11 @@ create table participant_events exercise_choice VARCHAR2(1000), exercise_argument BLOB, exercise_result BLOB, - exercise_actors VARCHAR_ARRAY, - exercise_child_event_ids VARCHAR_ARRAY, -- event identifiers of consequences of this exercise + exercise_actors CLOB NOT NULL CONSTRAINT ensure_json_participant_exercise_actors CHECK (exercise_actors IS JSON), + exercise_child_event_ids CLOB NOT NULL CONSTRAINT ensure_json_participant_exercise_child_event_ids CHECK (exercise_child_event_ids IS JSON), -- event identifiers of consequences of this exercise - flat_event_witnesses VARCHAR_ARRAY not null, - tree_event_witnesses VARCHAR_ARRAY not null, + flat_event_witnesses CLOB NOT NULL CONSTRAINT ensure_json_participant_flat_event_witnesses CHECK (flat_event_witnesses IS JSON), + tree_event_witnesses CLOB NOT NULL CONSTRAINT ensure_json_participant_tree_event_witnesses CHECK (tree_event_witnesses IS JSON), event_sequential_id NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY not null, create_argument_compression NUMBER, @@ -280,9 +267,8 @@ create index participant_events_event_sequential_id on participant_events (event -- 5. 
we need this index to convert event_offset to event_sequential_id create index participant_events_event_offset on participant_events (event_offset); --- TODO https://github.com/digital-asset/daml/issues/9493 --- create index participant_events_flat_event_witnesses_idx on participant_events (flat_event_witnesses); --- create index participant_events_tree_event_witnesses_idx on participant_events (tree_event_witnesses); +create index participant_events_flat_event_witnesses_idx on participant_events (JSON_ARRAY(flat_event_witnesses)); +create index participant_events_tree_event_witnesses_idx on participant_events (JSON_ARRAY(tree_event_witnesses)); --------------------------------------------------------------------------------------------------- @@ -303,7 +289,7 @@ create table participant_contracts create_argument BLOB not null, -- the following fields are null for divulged contracts - create_stakeholders VARCHAR_ARRAY, + create_stakeholders CLOB NOT NULL CONSTRAINT ensure_json_create_stakeholders CHECK (create_stakeholders IS JSON), create_key_hash VARCHAR2(4000), create_ledger_effective_time TIMESTAMP, create_argument_compression SMALLINT diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/Conversions.scala b/ledger/participant-integration-api/src/main/scala/platform/store/Conversions.scala index 7715180f7425..123125f2a001 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/Conversions.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/Conversions.scala @@ -3,10 +3,9 @@ package com.daml.platform.store -import java.sql.{Connection, JDBCType, PreparedStatement, Timestamp, Types} +import java.sql.{PreparedStatement, Timestamp, Types} import java.time.Instant import java.util.Date - import anorm.Column.nonNull import anorm._ import com.daml.ledger.EventId @@ -17,14 +16,33 @@ import com.daml.lf.crypto.Hash import com.daml.lf.data.Ref import com.daml.lf.data.Ref.Party import com.daml.lf.value.Value -import com.zaxxer.hikari.pool.HikariProxyConnection import io.grpc.Status.Code - +import spray.json._ +import DefaultJsonProtocol._ +import java.io.BufferedReader import scala.language.implicitConversions +import java.util.stream.Collectors private[platform] object OracleArrayConversions { - import oracle.jdbc.OracleConnection + implicit object PartyJsonFormat extends RootJsonFormat[Party] { + def write(c: Party) = + JsString(c) + + def read(value: JsValue) = value match { + case JsString(s) => s.asInstanceOf[Party] + case _ => deserializationError("Party expected") + } + } + + implicit object LedgerStringJsonFormat extends RootJsonFormat[Ref.LedgerString] { + def write(c: Ref.LedgerString) = + JsString(c) + def read(value: JsValue) = value match { + case JsString(s) => s.asInstanceOf[Ref.LedgerString] + case _ => deserializationError("Ledger string expected") + } + } implicit object StringArrayParameterMetadata extends ParameterMetaData[Array[String]] { override def sqlType: String = "ARRAY" override def jdbcType: Int = java.sql.Types.ARRAY @@ -37,90 +55,6 @@ private[platform] object OracleArrayConversions { val jdbcType = Types.INTEGER } - @SuppressWarnings(Array("org.wartremover.warts.ArrayEquals")) - abstract sealed class ArrayToStatement[T](oracleTypeName: String) - extends ToStatement[Array[T]] - with NotNullGuard { - override def set(s: PreparedStatement, index: Int, v: Array[T]): Unit = { - if (v == (null: AnyRef)) { - s.setNull(index, JDBCType.ARRAY.getVendorTypeNumber, oracleTypeName) - } else { - s.setObject( - 
index, - unwrapConnection(s).createARRAY(oracleTypeName, v.asInstanceOf[Array[AnyRef]]), - JDBCType.ARRAY.getVendorTypeNumber, - ) - } - } - } - - implicit object ByteArrayArrayToStatement - extends ArrayToStatement[Array[Byte]]("BYTE_ARRAY_ARRAY") - - implicit object TimestampArrayToStatement extends ArrayToStatement[Timestamp]("TIMESTAMP_ARRAY") - - implicit object RefPartyArrayToStatement extends ArrayToStatement[Ref.Party]("VARCHAR_ARRAY") - - implicit object CharArrayToStatement extends ArrayToStatement[String]("VARCHAR_ARRAY") - - implicit object IntegerArrayToStatement extends ArrayToStatement[Integer]("SMALLINT_ARRAY") - - implicit object BooleanArrayToStatement - extends ArrayToStatement[java.lang.Boolean]("BOOLEAN_ARRAY") - - implicit object InstantArrayToStatement extends ToStatement[Array[Instant]] { - override def set(s: PreparedStatement, index: Int, v: Array[Instant]): Unit = { - s.setObject( - index, - unwrapConnection(s).createARRAY("TIMESTAMP_ARRAY", v.map(java.sql.Timestamp.from)), - JDBCType.ARRAY.getVendorTypeNumber, - ) - } - } - - @SuppressWarnings(Array("org.wartremover.warts.ArrayEquals")) - implicit object StringOptionArrayArrayToStatement extends ToStatement[Option[Array[String]]] { - override def set(s: PreparedStatement, index: Int, stringOpts: Option[Array[String]]): Unit = { - stringOpts match { - case None => s.setNull(index, JDBCType.ARRAY.getVendorTypeNumber, "VARCHAR_ARRAY") - case Some(arr) => - s.setObject( - index, - unwrapConnection(s) - .createARRAY("VARCHAR_ARRAY", arr.asInstanceOf[Array[AnyRef]]), - JDBCType.ARRAY.getVendorTypeNumber, - ) - } - } - } - - object IntToSmallIntConversions { - - implicit object IntOptionArrayArrayToStatement extends ToStatement[Array[Option[Int]]] { - override def set(s: PreparedStatement, index: Int, intOpts: Array[Option[Int]]): Unit = { - val intOrNullsArray = intOpts.map(_.map(new Integer(_)).orNull) - s.setObject( - index, - unwrapConnection(s) - .createARRAY("SMALLINT_ARRAY", intOrNullsArray.asInstanceOf[Array[AnyRef]]), - JDBCType.ARRAY.getVendorTypeNumber, - ) - } - } - } - - private def unwrapConnection[T](s: PreparedStatement): OracleConnection = { - s.getConnection match { - case hikari: HikariProxyConnection => - hikari.unwrap(classOf[OracleConnection]) - case oracle: OracleConnection => - oracle - case c: Connection => - sys.error( - s"Unsupported connection type for creating Oracle integer array: ${c.getClass.getSimpleName}" - ) - } - } } private[platform] object JdbcArrayConversions { @@ -216,6 +150,49 @@ private[platform] object Conversions { } } + object DefaultImplicitArrayColumn { + val default = Column.of[Array[String]] + } + + object ArrayColumnToStringArray { + // This is used to allow us to convert oracle CLOB fields storing JSON text into Array[String]. + // We first summon the default Anorm column for an Array[String], and run that - this preserves + // the behavior PostgreSQL is expecting. 
If that fails, we then try our Oracle specific deserialization + // strategies + + implicit val arrayColumnToStringArray: Column[Array[String]] = nonNull { (value, meta) => + DefaultImplicitArrayColumn.default(value, meta) match { + case Right(value) => Right(value) + case Left(_) => + val MetaDataItem(qualified, _, _) = meta + value match { + case jsonArrayString: String => + Right(jsonArrayString.parseJson.convertTo[Array[String]]) + case clob: java.sql.Clob => + try { + val reader = clob.getCharacterStream + val br = new BufferedReader(reader) + val jsonArrayString = br.lines.collect(Collectors.joining) + reader.close + Right(jsonArrayString.parseJson.convertTo[Array[String]]) + } catch { + case e: Throwable => + Left( + TypeDoesNotMatch( + s"Cannot convert $value: received CLOB but failed to deserialize to " + + s"string array for column $qualified. Error message: ${e.getMessage}" + ) + ) + } + case _ => + Left( + TypeDoesNotMatch(s"Cannot convert $value: to string array for column $qualified") + ) + } + } + } + } + // PackageId implicit val columnToPackageId: Column[Ref.PackageId] = diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/dao/JdbcLedgerDao.scala b/ledger/participant-integration-api/src/main/scala/platform/store/dao/JdbcLedgerDao.scala index 59fc5b569743..6d72f9d39dbb 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/dao/JdbcLedgerDao.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/dao/JdbcLedgerDao.scala @@ -64,6 +64,8 @@ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} import scala.util.Try import scala.util.control.NonFatal +import spray.json._ +import spray.json.DefaultJsonProtocol._ private final case class ParsedPartyData( party: String, @@ -1381,8 +1383,7 @@ private[platform] object JdbcLedgerDao { recordTime: Instant, ): SimpleSql[Row] = { import com.daml.platform.store.OracleArrayConversions._ - SQL"insert into participant_command_completions(completion_offset, record_time, application_id, submitters, command_id, transaction_id) values ($offset, $recordTime, ${submitterInfo.applicationId}, ${submitterInfo.actAs - .toArray[String]}, ${submitterInfo.commandId}, $transactionId)" + SQL"insert into participant_command_completions(completion_offset, record_time, application_id, submitters, command_id, transaction_id) values ($offset, $recordTime, ${submitterInfo.applicationId}, ${submitterInfo.actAs.toJson.compactPrint}, ${submitterInfo.commandId}, $transactionId)" } override protected[JdbcLedgerDao] def prepareRejectionInsert( @@ -1392,8 +1393,8 @@ private[platform] object JdbcLedgerDao { reason: RejectionReason, ): SimpleSql[Row] = { import com.daml.platform.store.OracleArrayConversions._ - SQL"insert into participant_command_completions(completion_offset, record_time, application_id, submitters, command_id, status_code, status_message) values ($offset, $recordTime, ${submitterInfo.applicationId}, ${submitterInfo.actAs - .toArray[String]}, ${submitterInfo.commandId}, ${reason.code.value()}, ${reason.description})" + SQL"insert into participant_command_completions(completion_offset, record_time, application_id, submitters, command_id, status_code, status_message) values ($offset, $recordTime, ${submitterInfo.applicationId}, ${submitterInfo.actAs.toJson.compactPrint}, ${submitterInfo.commandId}, ${reason.code + .value()}, ${reason.description})" } // spaces which are subsequently trimmed left only for readability diff --git 
a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/ContractsTableOracle.scala b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/ContractsTableOracle.scala index c0f580db9620..3057798ad92d 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/ContractsTableOracle.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/ContractsTableOracle.scala @@ -12,6 +12,8 @@ import com.daml.platform.store.Conversions._ import com.daml.platform.store.dao.events.ContractsTable.Executable import com.daml.platform.store.serialization.Compression import com.daml.platform.store.OracleArrayConversions._ +import spray.json._ +import spray.json.DefaultJsonProtocol._ object ContractsTableOracle extends ContractsTable { @@ -43,7 +45,7 @@ object ContractsTableOracle extends ContractsTable { "template_id" -> templateId, "create_argument" -> createArgument, "create_ledger_effective_time" -> ledgerEffectiveTime, - "create_stakeholders" -> stakeholders.toArray[String], + "create_stakeholders" -> stakeholders.toJson.compactPrint, "create_key_hash" -> key.map(_.hash), "create_argument_compression" -> createArgumentCompression.id, ) diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTable.scala b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTable.scala index b3ef30c97887..f8269034b1e6 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTable.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTable.scala @@ -6,7 +6,6 @@ package com.daml.platform.store.dao.events import java.io.InputStream import java.sql.Connection import java.time.Instant - import anorm.SqlParser.{array, binaryStream, bool, int, long, str} import anorm.{RowParser, ~} import com.daml.ledger.participant.state.v1.Offset @@ -46,6 +45,8 @@ private[events] object EventsTable { Offset ~ String ~ Int ~ Long ~ String ~ String ~ Instant ~ Identifier ~ Option[String] ~ Option[String] ~ Array[String] + import com.daml.platform.store.Conversions.ArrayColumnToStringArray.arrayColumnToStringArray + private val sharedRow: RowParser[SharedRow] = offset("event_offset") ~ str("transaction_id") ~ diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableFlatEventsRangeQueries.scala b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableFlatEventsRangeQueries.scala index 8f86fe25f9e0..995145730dc2 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableFlatEventsRangeQueries.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableFlatEventsRangeQueries.scala @@ -58,7 +58,6 @@ private[events] sealed abstract class EventsTableFlatEventsRangeQueries[Offset] pageSize: Int, ): SqlSequence[Vector[EventsTable.Entry[Raw.FlatEvent]]] = { require(filter.nonEmpty, "The request must be issued by at least one party") - // Route the request to the correct underlying query val frqK = if (filter.size == 1) { val (party, templateIds) = filter.iterator.next() @@ -111,7 +110,6 @@ private[events] sealed abstract class EventsTableFlatEventsRangeQueries[Offset] } } } - frqK match { case QueryParts.ByArith(read) => EventsRange.readPage( @@ -159,6 +157,7 @@ private[events] object EventsTableFlatEventsRangeQueries { QueryParts.ByArith( read = (range, 
limitOpt) => { val limitClause = limitOpt.map(sqlFunctions.limitClause).getOrElse("") + SQL""" select #$selectColumns, #${sqlFunctions.toArray(party)} as event_witnesses, case when #${sqlFunctions @@ -183,6 +182,7 @@ private[events] object EventsTableFlatEventsRangeQueries { QueryParts.ByArith( read = (range, limitOpt) => { val limitClause = limitOpt.map(sqlFunctions.limitClause).getOrElse("") + SQL""" select #$selectColumns, #${sqlFunctions.toArray(party)} as event_witnesses, case when #${sqlFunctions @@ -211,6 +211,7 @@ private[events] object EventsTableFlatEventsRangeQueries { QueryParts.ByArith( read = (range, limitOpt) => { val limitClause = limitOpt.map(sqlFunctions.limitClause).getOrElse("") + SQL""" select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id @@ -238,6 +239,7 @@ private[events] object EventsTableFlatEventsRangeQueries { QueryParts.ByArith( read = (range, limitOpt) => { val limitClause = limitOpt.map(sqlFunctions.limitClause).getOrElse("") + SQL""" select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id @@ -270,6 +272,7 @@ private[events] object EventsTableFlatEventsRangeQueries { QueryParts.ByArith( read = (range, limitOpt) => { val limitClause = limitOpt.map(sqlFunctions.limitClause).getOrElse("") + SQL""" select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id @@ -304,6 +307,7 @@ private[events] object EventsTableFlatEventsRangeQueries { QueryParts.ByArith( read = (range, limitOpt) => { val limitClause = limitOpt.map(sqlFunctions.limitClause).getOrElse("") + SQL""" select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id @@ -331,6 +335,7 @@ private[events] object EventsTableFlatEventsRangeQueries { ): QueryParts = { val witnessesWhereClause = sqlFunctions.arrayIntersectionWhereClause("flat_event_witnesses", party) + SQL"""select #$selectColumns, #${sqlFunctions.toArray(party)} as event_witnesses, case when #${sqlFunctions .arrayIntersectionWhereClause("submitters", party)} then command_id else '' end as command_id @@ -352,6 +357,7 @@ private[events] object EventsTableFlatEventsRangeQueries { ): QueryParts = { val witnessesWhereClause = sqlFunctions.arrayIntersectionWhereClause("flat_event_witnesses", party) + SQL"""select #$selectColumns, #${sqlFunctions.toArray(party)} as event_witnesses, case when #${sqlFunctions .arrayIntersectionWhereClause("submitters", party)} then command_id else '' end as command_id @@ -376,6 +382,7 @@ private[events] object EventsTableFlatEventsRangeQueries { sqlFunctions.arrayIntersectionValues("flat_event_witnesses", parties) val submittersInPartiesClause = sqlFunctions.arrayIntersectionWhereClause("submitters", parties) + SQL"""select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id from participant_events @@ -399,6 +406,7 @@ private[events] object EventsTableFlatEventsRangeQueries { sqlFunctions.arrayIntersectionValues("flat_event_witnesses", parties) val submittersInPartiesClause = sqlFunctions.arrayIntersectionWhereClause("submitters", parties) + SQL"""select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id from 
participant_events @@ -427,6 +435,7 @@ private[events] object EventsTableFlatEventsRangeQueries { sqlFunctions.arrayIntersectionValues("flat_event_witnesses", parties) val submittersInPartiesClause = sqlFunctions.arrayIntersectionWhereClause("submitters", parties) + SQL"""select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id from participant_events @@ -457,6 +466,7 @@ private[events] object EventsTableFlatEventsRangeQueries { sqlFunctions.arrayIntersectionValues("flat_event_witnesses", parties) val submittersInPartiesClause = sqlFunctions.arrayIntersectionWhereClause("submitters", parties) + SQL"""select #$selectColumns, #$filteredWitnesses as event_witnesses, case when #$submittersInPartiesClause then command_id else '' end as command_id from participant_events diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableOracle.scala b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableOracle.scala index f00dda2e6e88..a67acaca8feb 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableOracle.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/EventsTableOracle.scala @@ -11,6 +11,8 @@ import com.daml.ledger.{EventId, TransactionId} import com.daml.ledger.participant.state.v1.{Offset, SubmitterInfo, WorkflowId} import com.daml.platform.store.Conversions._ import com.daml.platform.store.OracleArrayConversions._ +import spray.json._ +import spray.json.DefaultJsonProtocol._ object EventsTableOracle extends EventsTable { @@ -80,8 +82,9 @@ object EventsTableOracle extends EventsTable { "ledger_effective_time" -> ledgerEffectiveTime, "command_id" -> submitterInfo.map(_.commandId), "application_id" -> submitterInfo.map(_.applicationId), - "submitters" -> Party.Array(submitterInfo.map(_.actAs).getOrElse(List.empty): _*), + "submitters" -> submitterInfo.map(_.actAs).getOrElse(List.empty).toJson.compactPrint, ) + for ((nodeId, node) <- events) yield { assert(stakeholders.contains(nodeId), s"No stakeholder for $nodeId") @@ -90,8 +93,8 @@ object EventsTableOracle extends EventsTable { Vector[NamedParameter]( "event_id" -> EventId(transactionId, nodeId).toLedgerString, "node_index" -> nodeId.index, - "flat_event_witnesses" -> Party.Array(stakeholders(nodeId).toSeq: _*), - "tree_event_witnesses" -> Party.Array(disclosure(nodeId).toSeq: _*), + "flat_event_witnesses" -> stakeholders(nodeId).toJson.compactPrint, + "tree_event_witnesses" -> disclosure(nodeId).toJson.compactPrint, ) val eventSpecificColumns = node match { @@ -133,8 +136,8 @@ object EventsTableOracle extends EventsTable { "template_id" -> event.coinst.template, "create_argument" -> argument, "create_argument_compression" -> argumentCompression, - "create_signatories" -> event.signatories.toArray[String], - "create_observers" -> event.stakeholders.diff(event.signatories).toArray[String], + "create_signatories" -> event.signatories.toJson.compactPrint, + "create_observers" -> event.stakeholders.diff(event.signatories).toJson.compactPrint, "create_agreement_text" -> event.coinst.agreementText, "create_key_value" -> key, "create_key_value_compression" -> keyCompression, @@ -157,17 +160,19 @@ object EventsTableOracle extends EventsTable { "exercise_argument_compression" -> argumentCompression, "exercise_result" -> result, "exercise_result_compression" -> resultCompression, - "exercise_actors" -> 
event.actingParties.toArray[String], + "exercise_actors" -> event.actingParties.toJson.compactPrint, "exercise_child_event_ids" -> event.children .map(EventId(transactionId, _).toLedgerString) - .toArray[String], + .toList + .toJson + .compactPrint, ) ++ emptyCreateFields private val emptyCreateFields = Vector[NamedParameter]( "create_argument" -> Option.empty[Array[Byte]], "create_argument_compression" -> Option.empty[Short], - "create_signatories" -> Option.empty[Array[String]], - "create_observers" -> Option.empty[Array[String]], + "create_signatories" -> "[]", + "create_observers" -> "[]", "create_agreement_text" -> Option.empty[String], "create_key_value" -> Option.empty[Array[Byte]], "create_key_value_compression" -> Option.empty[Short], @@ -180,8 +185,8 @@ object EventsTableOracle extends EventsTable { "exercise_argument_compression" -> Option.empty[Short], "exercise_result" -> Option.empty[Array[Byte]], "exercise_result_compression" -> Option.empty[Short], - "exercise_actors" -> Option.empty[Array[String]], - "exercise_child_event_ids" -> Option.empty[Array[String]], + "exercise_actors" -> "[]", + "exercise_child_event_ids" -> "[]", ) private val updateArchived = diff --git a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/SqlFunctions.scala b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/SqlFunctions.scala index 2a09b281366d..b73f6485e108 100644 --- a/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/SqlFunctions.scala +++ b/ledger/participant-integration-api/src/main/scala/platform/store/dao/events/SqlFunctions.scala @@ -63,14 +63,15 @@ private[dao] object SqlFunctions { // TODO https://github.com/digital-asset/daml/issues/9493 // This is likely extremely inefficient due to the multiple full tablescans on unindexed varray column override def arrayIntersectionWhereClause(arrayColumn: String, parties: Set[Party]): String = - parties - .map(party => s"('$party') IN (SELECT * FROM TABLE($arrayColumn))") - .mkString("(", " or ", ")") + s"""JSON_EXISTS($arrayColumn, '$$[*]?(@ in ("${parties.mkString("""", """")}"))')""" override def arrayIntersectionValues(arrayColumn: String, parties: Set[Party]): String = - s"CAST(MULTISET(select unique $arrayColumn.* FROM TABLE($arrayColumn) $arrayColumn intersect select * from TABLE(VARCHAR_ARRAY(${format(parties)}))) as VARCHAR_ARRAY)" + s"""(select json_arrayagg(value) from (select value + |from json_table($arrayColumn, '$$[*]' columns (value PATH '$$')) + |where ${parties.map { party => s"value = '$party'" }.mkString(" or ")})) + |""".stripMargin - override def toArray(value: String) = s"VARCHAR_ARRAY('$value')" + override def toArray(value: String) = s"""'["$value"]'""" override def limitClause(numRows: Int) = s"fetch next $numRows rows only" diff --git a/ledger/participant-integration-api/src/test/lib/scala/platform/store/dao/JdbcLedgerDaoActiveContractsSpec.scala b/ledger/participant-integration-api/src/test/lib/scala/platform/store/dao/JdbcLedgerDaoActiveContractsSpec.scala index a3d2541b39eb..df0ee7a72348 100644 --- a/ledger/participant-integration-api/src/test/lib/scala/platform/store/dao/JdbcLedgerDaoActiveContractsSpec.scala +++ b/ledger/participant-integration-api/src/test/lib/scala/platform/store/dao/JdbcLedgerDaoActiveContractsSpec.scala @@ -268,5 +268,4 @@ private[dao] trait JdbcLedgerDaoActiveContractsSpec source: Source[GetActiveContractsResponse, NotUsed] ): Future[Seq[CreatedEvent]] = source.runWith(Sink.seq).map(_.flatMap(_.activeContracts)) - } 
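Reviewer note, not part of the patch: the changes above replace Oracle VARRAY bind values with compact JSON text stored in CLOB columns, written via spray-json and read back by `ArrayColumnToStringArray` in `Conversions.scala`. Below is a minimal, self-contained sketch of that round-trip; it uses plain `String` in place of `Ref.Party` and an illustrative object name.

```scala
import spray.json._
import spray.json.DefaultJsonProtocol._

object JsonArrayRoundTripDemo extends App {
  // Write side: the value now bound to CLOB columns such as `submitters`,
  // replacing the former Party.Array / VARCHAR_ARRAY binds.
  val submitters: List[String] = List("Alice", "Bob")
  val stored: String = submitters.toJson.compactPrint // ["Alice","Bob"]

  // Read side: once the CLOB has been drained to a String,
  // ArrayColumnToStringArray parses it back the same way.
  val restored: Array[String] = stored.parseJson.convertTo[Array[String]]
  assert(restored.sameElements(submitters))
}
```

Note that the empty-array defaults in `EventsTableOracle` (`"[]"` for `create_signatories`, `exercise_actors`, and friends) satisfy both the new `NOT NULL` constraints and the `IS JSON` checks introduced in `V1__Init.sql`, since an empty JSON array is valid JSON.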
diff --git a/ledger/participant-integration-api/src/test/resources/OracleLog.properties b/ledger/participant-integration-api/src/test/resources/OracleLog.properties new file mode 100644 index 000000000000..c4a76fd3701e --- /dev/null +++ b/ledger/participant-integration-api/src/test/resources/OracleLog.properties @@ -0,0 +1,6 @@ +level=FINE +oracle.jdbc.level=FINE +oracle.jdbc.handlers=java.util.logging.ConsoleHandler +java.util.logging.ConsoleHandler.level=FINE +java.util.logging.ConsoleHandler.formatter = \ +java.util.logging.SimpleFormatter diff --git a/maven_install_2.13.json b/maven_install_2.13.json index f269ba6bf6ed..1e003b84689d 100644 --- a/maven_install_2.13.json +++ b/maven_install_2.13.json @@ -1,6 +1,6 @@ { "dependency_tree": { - "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": -367049757, + "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": -536058548, "conflict_resolution": {}, "dependencies": [ { @@ -1508,6 +1508,28 @@ "sha256": "047ffe7c529b7cbbd3efa7e35a7ce27d3c4ffae05fa77f1ff4cfd46a8b40a52a", "url": "https://repo1.maven.org/maven2/com/lihaoyi/upickle-core_2.13/1.2.0/upickle-core_2.13-1.2.0-sources.jar" }, + { + "coord": "com.oracle.database.jdbc.debug:ojdbc8_g:19.8.0.0", + "dependencies": [], + "directDependencies": [], + "file": "v1/https/repo1.maven.org/maven2/com/oracle/database/jdbc/debug/ojdbc8_g/19.8.0.0/ojdbc8_g-19.8.0.0.jar", + "mirror_urls": [ + "https://repo1.maven.org/maven2/com/oracle/database/jdbc/debug/ojdbc8_g/19.8.0.0/ojdbc8_g-19.8.0.0.jar" + ], + "sha256": "a7d741840277fc6be059d53e9aa46a4568a570cda55c7491a620993ff558738e", + "url": "https://repo1.maven.org/maven2/com/oracle/database/jdbc/debug/ojdbc8_g/19.8.0.0/ojdbc8_g-19.8.0.0.jar" + }, + { + "coord": "com.oracle.database.jdbc.debug:ojdbc8_g:jar:sources:19.8.0.0", + "dependencies": [], + "directDependencies": [], + "file": "v1/https/repo1.maven.org/maven2/com/oracle/database/jdbc/debug/ojdbc8_g/19.8.0.0/ojdbc8_g-19.8.0.0-sources.jar", + "mirror_urls": [ + "https://repo1.maven.org/maven2/com/oracle/database/jdbc/debug/ojdbc8_g/19.8.0.0/ojdbc8_g-19.8.0.0-sources.jar" + ], + "sha256": "25a9cb395eee4096d117e8146a48dfa2c748eea7884b0fa01d8a1b99f399d01d", + "url": "https://repo1.maven.org/maven2/com/oracle/database/jdbc/debug/ojdbc8_g/19.8.0.0/ojdbc8_g-19.8.0.0-sources.jar" + }, { "coord": "com.oracle.database.jdbc:ojdbc8:19.8.0.0", "dependencies": [],
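Reviewer note, not part of the patch: the section cuts off before the new Oracle `SqlFunctions` are exercised, so here is an illustrative rendering of the JSON-path predicate that the rewritten `arrayIntersectionWhereClause` emits in place of the per-party `IN (SELECT * FROM TABLE(...))` subqueries. The party names are made up; the column name matches the schema above.

```scala
object JsonWhereClauseDemo extends App {
  val parties = Set("Alice", "Bob")
  // Mirrors the interpolation in the Oracle arrayIntersectionWhereClause above:
  // one JSON_EXISTS call tests membership of any of the parties in the array.
  val whereClause =
    s"""JSON_EXISTS(flat_event_witnesses, '$$[*]?(@ in ("${parties.mkString("\", \"")}"))')"""
  println(whereClause)
  // JSON_EXISTS(flat_event_witnesses, '$[*]?(@ in ("Alice", "Bob"))')
}
```

A single `JSON_EXISTS` predicate lets Oracle evaluate the whole membership test in one pass over the JSON document rather than one subquery per party; whether it can be served efficiently by the new `JSON_ARRAY(...)` indexes in `V1__Init.sql` is left open by the TODO referencing issue 9493 that remains in `SqlFunctions.scala`.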