diff --git a/build.sbt b/build.sbt index cd1762443..784da8eb0 100644 --- a/build.sbt +++ b/build.sbt @@ -138,9 +138,10 @@ lazy val `quill-sql` = // errors will happen. Even if the pprint classes are actually there "io.suzaku" %% "boopickle" % "1.4.0", "com.lihaoyi" %% "pprint" % "0.6.6", - "io.getquill" %% "quill-engine" % "4.2.0", + "ch.qos.logback" % "logback-classic" % "1.2.3" % Test, + "io.getquill" %% "quill-engine" % "4.2.1-SNAPSHOT", "dev.zio" %% "zio" % "2.0.0", - ("io.getquill" %% "quill-util" % "4.2.0") + ("io.getquill" %% "quill-util" % "4.2.1-SNAPSHOT") .excludeAll({ if (isCommunityBuild) Seq(ExclusionRule(organization = "org.scalameta", name = "scalafmt-core_2.13")) diff --git a/build/Dockerfile-ping b/build/Dockerfile-ping new file mode 100644 index 000000000..1db86eaec --- /dev/null +++ b/build/Dockerfile-ping @@ -0,0 +1,5 @@ +# Builds an Ubuntu-based companion image with ping/iproute2 utilities, used to test latency +# for performance experimentation. +FROM ubuntu:20.04 +RUN apt-get update && \ + apt-get install iproute2 iputils-ping -y diff --git a/build/Dockerfile-postgres b/build/Dockerfile-postgres new file mode 100644 index 000000000..1be65c2ad --- /dev/null +++ b/build/Dockerfile-postgres @@ -0,0 +1,5 @@ +# Builds an Ubuntu-based postgres image whose latency can be modified to be higher +# for performance experimentation. +FROM ubuntu/postgres:12-20.04_edge +RUN apt-get update && \ + apt-get install iproute2 iputils-ping -y diff --git a/docker-compose.yml b/docker-compose.yml index 6baa27d97..d8e96b3bd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,12 +2,16 @@ version: '2.2' services: postgres: - image: postgres:9.6 + build: + context: . 
+ dockerfile: ./build/Dockerfile-postgres ports: - "15432:5432" environment: - POSTGRES_USER=postgres - POSTGRES_HOST_AUTH_METHOD=trust + cap_add: + - NET_ADMIN mysql: image: mysql/mysql-server:8.0.23 # use this because it supports ARM64 architecture for M1 Mac @@ -76,6 +80,13 @@ services: command: - ./build/setup_local.sh + docker_ping: + build: + context: . + dockerfile: ./build/Dockerfile-ping + container_name: docker_ping + command: > + ping postgres sbt: build: diff --git a/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala b/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala index c29b333e9..4265327c9 100644 --- a/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala +++ b/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala @@ -84,7 +84,7 @@ class CassandraZioContext[+N <: NamingStrategy](val naming: N) @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[CassandraZioSession, Throwable, Unit] = InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[CassandraZioSession, Throwable, Unit] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[CassandraZioSession, Throwable, Unit] = InternalApi.runBatchAction(quoted, 1) protected def page(rs: AsyncResultSet): CIO[Chunk[Row]] = ZIO.succeed { val builder = ChunkBuilder.make[Row](rs.remaining()) diff --git a/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala b/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala index 3bd5af31c..c0027139c 100644 --- a/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala +++ b/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala @@ -59,7 +59,7 @@ object Quill { @targetName("runAction") inline def run[E](inline 
quoted: Quoted[Action[E]]): ZIO[Any, Throwable, Unit] = InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, Throwable, Unit] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, Throwable, Unit] = InternalApi.runBatchAction(quoted, 1) def streamQuery[T](fetchSize: Option[Int], cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZStream[Any, Throwable, T] = onSessionStream(underlying.streamQuery(fetchSize, cql, prepare, extractor)(info, dc)) diff --git a/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala b/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala index c865f9b72..9f95ce0a8 100644 --- a/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala @@ -56,7 +56,7 @@ class CassandraAsyncContext[+N <: NamingStrategy]( @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): Future[Unit] = InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Future[Unit] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Future[Unit] = InternalApi.runBatchAction(quoted, 1) // override def performIO[T](io: IO[T, _], transactional: Boolean = false)(implicit ec: ExecutionContext): Result[T] = { // if (transactional) logger.underlying.warn("Cassandra doesn't support transactions, ignoring `io.transactional`") diff --git a/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala b/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala index b3d9fe9d4..f9e3973cf 
100644 --- a/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala @@ -39,7 +39,7 @@ class CassandraSyncContext[+N <: NamingStrategy]( @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): Unit = InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Unit = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Unit = InternalApi.runBatchAction(quoted, 1) override protected def context: Runner = () diff --git a/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala b/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala index 0daedce65..20809b17e 100644 --- a/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala +++ b/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala @@ -57,9 +57,13 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] @targetName("runActionReturningMany") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ConnectionIO[List[T]] = InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ConnectionIO[List[Long]] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ConnectionIO[List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + @targetName("runBatchActionDefault") + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ConnectionIO[List[Long]] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: 
Quoted[BatchAction[A]]): ConnectionIO[List[T]] = InternalApi.runBatchActionReturning(quoted) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ConnectionIO[List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + @targetName("runBatchActionReturningDefault") + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ConnectionIO[List[T]] = InternalApi.runBatchActionReturning(quoted, 1) // Logging behavior should be identical to JdbcContextBase.scala, which includes a couple calls // to log.underlying below. diff --git a/quill-jasync-postgres/src/main/scala/io/getquill/PostgresJAsyncContext.scala b/quill-jasync-postgres/src/main/scala/io/getquill/PostgresJAsyncContext.scala index f07aba145..c851e3140 100644 --- a/quill-jasync-postgres/src/main/scala/io/getquill/PostgresJAsyncContext.scala +++ b/quill-jasync-postgres/src/main/scala/io/getquill/PostgresJAsyncContext.scala @@ -40,9 +40,13 @@ class PostgresJAsyncContext[+N <: NamingStrategy](naming: N, pool: ConnectionPoo @targetName("runActionReturningMany") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): Future[List[T]] = InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Future[Seq[Long]] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): Future[Seq[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + @targetName("runBatchActionDefault") + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Future[Seq[Long]] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): Future[Seq[T]] = 
InternalApi.runBatchActionReturning(quoted) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): Future[Seq[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + @targetName("runBatchActionReturningDefault") + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): Future[Seq[T]] = InternalApi.runBatchActionReturning(quoted, 1) override protected def extractActionResult[O](returningAction: ReturnAction, returningExtractor: Extractor[O])(result: DBQueryResult): List[O] = result.getRows.asScala.toList.map(row => returningExtractor(row, ())) diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala index c26d557c5..548ebd30c 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala @@ -91,9 +91,13 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] e @targetName("runActionReturningMany") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[DataSource, SQLException, List[T]] = InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[DataSource, SQLException, List[Long]] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[DataSource, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + @targetName("runBatchActionDefault") + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[DataSource, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, 
A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[DataSource, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[DataSource, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + @targetName("runBatchActionReturningDefault") + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[DataSource, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) /** * Since we are immediately executing the ZIO that creates this fiber ref whether it is global is not really relevant since it does not really use scope diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala index 4441446fd..09e200df1 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala @@ -52,9 +52,13 @@ abstract class ZioJdbcUnderlyingContext[+Dialect <: SqlIdiom, +Naming <: NamingS @targetName("runActionReturningMany") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[Connection, SQLException, List[T]] = InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Connection, SQLException, List[Long]] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Connection, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + @targetName("runBatchActionDefault") + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): 
ZIO[Connection, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[Connection, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Connection, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + @targetName("runBatchActionReturningDefault") + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[Connection, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) // Need explicit return-type annotations due to scala/bug#8356. Otherwise macro system will not understand Result[Long]=Task[Long] etc... override def executeAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): QCIO[Long] = diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala index 7dd0fa9d2..48ef8a595 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala @@ -65,9 +65,13 @@ trait QuillBaseContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends @targetName("runActionReturningMany") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[Any, SQLException, List[T]] = InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, SQLException, List[Long]] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Any, SQLException, List[Long]] = 
InternalApi.runBatchAction(quoted, rowsPerBatch) + @targetName("runBatchActionDefault") + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Any, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + @targetName("runBatchActionReturningDefault") + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) def executeAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, Long] = onDS(dsDelegate.executeAction(sql, prepare)(info, dc)) diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala index 32c0893c9..545e58927 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala @@ -16,6 +16,7 @@ trait MysqlJdbcContextBase[+N <: NamingStrategy] trait SqliteJdbcContextBase[+N <: NamingStrategy] extends SqliteJdbcTypes[N] + with SqliteExecuteOverride[N] with JdbcContextBase[SqliteDialect, N] trait SqlServerJdbcContextBase[+N <: NamingStrategy] diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala index 910eef1d1..d7dfd5ee0 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala +++ 
b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala @@ -47,9 +47,13 @@ abstract class JdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] @targetName("runActionReturningMany") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): List[T] = InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): List[Long] = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): List[Long] = InternalApi.runBatchAction(quoted, rowsPerBatch) + @targetName("runBatchActionDefault") + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): List[Long] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): List[T] = InternalApi.runBatchActionReturning(quoted) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): List[T] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + @targetName("runBatchActionReturningDefault") + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): List[T] = InternalApi.runBatchActionReturning(quoted, 1) override def wrap[T](t: => T): T = t override def push[A, B](result: A)(f: A => B): B = f(result) diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala index 12c0a3d31..d77021f41 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala @@ -78,7 +78,7 @@ trait JdbcContextVerbExecute[+Dialect <: SqlIdiom, +Naming <: 
NamingStrategy] ex groups.flatMap { case BatchGroup(sql, prepare) => val ps = conn.prepareStatement(sql) - logger.underlying.debug("Batch: {}", sql) + //logger.underlying.debug("Batch: {}", sql.take(200) + (if (sql.length > 200) "..." else "")) prepare.foreach { f => val (params, _) = f(ps, conn) logger.logBatchItem(sql, params) diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala index eae9ace67..7f15dafd4 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala @@ -4,6 +4,11 @@ import java.sql.Types import io.getquill._ import io.getquill.context.ExecutionInfo import io.getquill.util.ContextLogger +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import scala.util.control.NonFatal.apply +import scala.util.control.NonFatal trait PostgresJdbcTypes[+N <: NamingStrategy] extends JdbcContextTypes[PostgresDialect, N] with BooleanObjectEncoding @@ -42,6 +47,48 @@ trait SqliteJdbcTypes[+N <: NamingStrategy] extends JdbcContextTypes[SqliteDiale val idiom = SqliteDialect } + +trait SqliteExecuteOverride[+N <: NamingStrategy] extends JdbcContextVerbExecute[SqliteDialect, N] { + + private val logger = ContextLogger(classOf[SqliteExecuteOverride[_]]) + + private def runInTransaction[T](conn: Connection)(op: => T): T = { + val wasAutoCommit = conn.getAutoCommit + conn.setAutoCommit(false) + val result = op + try { + conn.commit() + result + } catch { + case scala.util.control.NonFatal(e) => + conn.rollback() + throw e + } finally + conn.setAutoCommit(wasAutoCommit) + } + + override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): Result[List[T]] = + withConnectionWrapped { conn => + logger.underlying.warn( + "Sqlite does not support 
Batch-Actions with returning-keys. Quill will attempt to emulate this function with single-row inserts inside a transaction but using this API is not recommended." + ) + groups.flatMap { + case BatchGroupReturning(sql, returningBehavior, prepare) => + val ps = conn.prepareStatement(sql, java.sql.Statement.RETURN_GENERATED_KEYS) + logger.underlying.debug("Batch: {}", sql) + runInTransaction(conn) { + prepare.flatMap { f => + val (params, _) = f(ps, conn) + logger.logBatchItem(sql, params) + ps.executeUpdate() + extractResult(ps.getGeneratedKeys(), conn, extractor) + } + } + } + } +} + + /** Use extension instead of self-pointer to `JdbcContextVerbExecute[SQLServerDialect, N]` here. Want identical * implementation to Scala2-Quill and doing it via self-pointer in Scala2-Quill will cause override-conflict errors in SqlServerExecuteOverride. */ trait SqlServerExecuteOverride[+N <: NamingStrategy] extends JdbcContextVerbExecute[SQLServerDialect, N] { @@ -54,6 +101,40 @@ trait SqlServerExecuteOverride[+N <: NamingStrategy] extends JdbcContextVerbExec logger.logQuery(sql, params) extractResult(ps.executeQuery, conn, extractor) } + + override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): Result[List[T]] = + withConnectionWrapped { conn => + groups.flatMap { + case BatchGroupReturning(sql, returningBehavior, prepare) => + val ps = conn.prepareStatement(sql, java.sql.Statement.RETURN_GENERATED_KEYS) + logger.underlying.debug("Batch: {}", sql) + val outputs = + prepare.flatMap { f => + val (params, _) = f(ps, conn) + logger.logBatchItem(sql, params) + ps.addBatch() + // The SQL Server driver has no ability to either call getGeneratedKeys or executeQuery + // at the end of a sequence of addBatch calls to get all inserted keys/executed queries + // (whether an `OUTPUT` clause is used in the Query or not). 
That means that in order + // to be able to get any results, we need to use extractResult(ps.executeQuery, ...) + // on every single inserted batch! See the following mssql-jdbc issues for more detail: + // https://github.com/microsoft/mssql-jdbc/issues/358 + // https://github.com/Microsoft/mssql-jdbc/issues/245 + // Also note that Slick specifically mentions that returning-keys is generally + // not supported when jdbc-batching is used: + // https://github.com/slick/slick/blob/06ccee3cdc0722adeb8bb0658afb4a0d3524b119/slick/src/main/scala/slick/jdbc/JdbcActionComponent.scala#L654 + // Therefore slick falls back to single-row-insert batching when insertion with getGeneratedKeys is used + // + // However, in ProtoQuill we can do a little better. In this case we take advantage of multi-row inserts + // (via multiple VALUES clauses) each of which is an element of the `prepares` list. That way, we only + // need to execute `extractResult(ps.executeQuery(),...)` once per every insert-query (where each query + // could potentially have 1000+ insert-rows via 1000 VALUES-clauses). This radically decreases + // the number of calls that need to be made to get back IDs (and other data) of the inserted rows. 
+ extractResult(ps.executeQuery(), conn, extractor) + } + outputs + } + } } trait SqlServerJdbcTypes[+N <: NamingStrategy] extends JdbcContextTypes[SQLServerDialect, N] diff --git a/quill-jdbc/src/test/resources/logback.xml b/quill-jdbc/src/test/resources/logback.xml new file mode 100644 index 000000000..9eb253603 --- /dev/null +++ b/quill-jdbc/src/test/resources/logback.xml @@ -0,0 +1,15 @@ + + + + + + [QuillTest] %d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n + + + + + + + + + \ No newline at end of file diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala new file mode 100644 index 000000000..775832225 --- /dev/null +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala @@ -0,0 +1,34 @@ +package io.getquill.context.jdbc.h2 + +import io.getquill.context.sql.BatchValuesSpec +import io.getquill._ + +class BatchValuesJdbcSpec extends BatchValuesSpec { + + val context = testContext + import testContext._ + + override def beforeEach(): Unit = { + testContext.run(sql"TRUNCATE TABLE Product; ALTER TABLE Product ALTER COLUMN id RESTART WITH 1".as[Delete[Product]]) + super.beforeEach() + } + + "Ex 1 - Batch Insert Normal" in { + import `Ex 1 - Batch Insert Normal`._ + testContext.run(op, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } + + "Ex 2 - Batch Insert Returning" in { + import `Ex 2 - Batch Insert Returning`._ + val ids = testContext.run(op, batchSize) + ids mustEqual productsOriginal.map(_.id) + testContext.run(get) mustEqual productsOriginal + } + + "Ex 3 - Batch Insert Mixed" in { + import `Ex 3 - Batch Insert Mixed`._ + testContext.run(op, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } +} \ No newline at end of file diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala 
b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala new file mode 100644 index 000000000..b667b437b --- /dev/null +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala @@ -0,0 +1,35 @@ +package io.getquill.context.jdbc.mysql + +import io.getquill.context.sql.BatchValuesSpec +import io.getquill._ + +class BatchValuesJdbcSpec extends BatchValuesSpec { + + val context = testContext + import testContext._ + + override def beforeEach(): Unit = { + testContext.run(query[Product].delete) + testContext.run(sql"ALTER TABLE Product AUTO_INCREMENT = 1".as[Delete[Product]]) + super.beforeEach() + } + + "Ex 1 - Batch Insert Normal" in { + import `Ex 1 - Batch Insert Normal`._ + testContext.run(op, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } + + "Ex 2 - Batch Insert Returning" in { + import `Ex 2 - Batch Insert Returning`._ + val ids = testContext.run(op, batchSize) + ids.toSet mustEqual productsOriginal.map(_.id).toSet + testContext.run(get).toSet mustEqual productsOriginal.toSet + } + + "Ex 3 - Batch Insert Mixed" in { + import `Ex 3 - Batch Insert Mixed`._ + testContext.run(op, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } +} \ No newline at end of file diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/BatchValuesJdbcSpec.scala new file mode 100644 index 000000000..e2859f53f --- /dev/null +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/BatchValuesJdbcSpec.scala @@ -0,0 +1,38 @@ +package io.getquill.context.jdbc.oracle + +import io.getquill.context.sql.BatchValuesSpec +import io.getquill._ + +class BatchValuesJdbcSpec extends BatchValuesSpec { + + val context = testContext + import testContext._ + + override def beforeEach(): Unit = { + testContext.run(query[Product].delete) + testContext.run(sql"ALTER TABLE Product MODIFY (id GENERATED BY 
DEFAULT ON NULL AS IDENTITY (START WITH 1))".as[Delete[Product]]) + super.beforeEach() + } + + "Ex 1 - Batch Insert Normal" in { + import `Ex 1 - Batch Insert Normal`.{given, _} + testContext.run(op, batchSize) // + testContext.run(get).toSet mustEqual result.toSet + } + + "Ex 2 - Batch Insert Returning" in { + import `Ex 2 - Batch Insert Returning`._ + val ids = testContext.run(op, batchSize) + ids.toSet mustEqual productsOriginal.map(_.id).toSet + testContext.run(get).toSet mustEqual productsOriginal.toSet + } + + "Ex 3 - Batch Insert Mixed" in { + import `Ex 3 - Batch Insert Mixed`._ + inline def op = quote { + liftQuery(products).foreach(p => query[Product].insert(_.description -> lift("BlahBlah"), _.sku -> p.sku)) + } + testContext.run(op, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } +} \ No newline at end of file diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala new file mode 100644 index 000000000..815928366 --- /dev/null +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala @@ -0,0 +1,34 @@ +package io.getquill.context.jdbc.postgres + +import io.getquill.context.sql.BatchValuesSpec +import io.getquill._ + +class BatchValuesJdbcSpec extends BatchValuesSpec { + + val context = testContext + import testContext._ + + override def beforeEach(): Unit = { + testContext.run(sql"TRUNCATE TABLE Product RESTART IDENTITY CASCADE".as[Delete[Product]]) + super.beforeEach() + } + + "Ex 1 - Batch Insert Normal" in { + import `Ex 1 - Batch Insert Normal`._ + testContext.run(op, batchSize) + testContext.run(get) mustEqual result + } + + "Ex 2 - Batch Insert Returning" in { + import `Ex 2 - Batch Insert Returning`._ + val ids = testContext.run(op, batchSize) + ids mustEqual expectedIds + testContext.run(get) mustEqual result + } + + "Ex 3 - Batch Insert Mixed" in { + import `Ex 3 - 
Batch Insert Mixed`._ + testContext.run(op, batchSize) + testContext.run(get) mustEqual result + } +} \ No newline at end of file diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/BatchValuesJdbcSpec.scala new file mode 100644 index 000000000..e7753bbea --- /dev/null +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/BatchValuesJdbcSpec.scala @@ -0,0 +1,35 @@ +package io.getquill.context.jdbc.sqlite + +import io.getquill.context.sql.BatchValuesSpec +import io.getquill._ + +class BatchValuesJdbcSpec extends BatchValuesSpec { + + val context = testContext + import testContext._ + + override def beforeEach(): Unit = { + testContext.run(query[Product].delete) + //testContext.run(sql"DELETE FROM quill_test.sqlite_sequence WHERE name='Product';".as[Delete[Product]]) + super.beforeEach() + } + + "Ex 1 - Batch Insert Normal" in { + import `Ex 1 - Batch Insert Normal`._ + testContext.run(op, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } + + "Ex 2 - Batch Insert Returning" in { + import `Ex 2 - Batch Insert Returning`._ + val ids = testContext.run(op, batchSize) + ids mustEqual productsOriginal.map(_.id) + testContext.run(get) mustEqual productsOriginal + } + + "Ex 3 - Batch Insert Mixed" in { + import `Ex 3 - Batch Insert Mixed`._ + testContext.run(op, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } +} \ No newline at end of file diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/BatchValuesJdbcSpec.scala new file mode 100644 index 000000000..e98bf8be8 --- /dev/null +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/BatchValuesJdbcSpec.scala @@ -0,0 +1,48 @@ +package io.getquill.context.jdbc.sqlserver + +import io.getquill.context.sql.BatchValuesSpec +import io.getquill._ + + +class 
BatchValuesJdbcSpec extends BatchValuesSpec { // + + val context = testContext + import testContext._ + + override def beforeEach(): Unit = { + testContext.run(sql"TRUNCATE TABLE Product; DBCC CHECKIDENT ('Product', RESEED, 1)".as[Delete[Product]]) + super.beforeEach() + } + + "Ex 1 - Batch Insert Normal" in { + import `Ex 1 - Batch Insert Normal`._ + inline def splicedOp = opExt(insert => sql"SET IDENTITY_INSERT Product ON; ${insert}".as[Insert[Product]]) + testContext.run(splicedOp, batchSize) + testContext.run(get).toSet mustEqual result.toSet + } + + "Ex 2 - Batch Insert Returning" in { + import `Ex 2 - Batch Insert Returning`._ + val ids = testContext.run(op, batchSize) + ids mustEqual expectedIds + testContext.run(get) mustEqual result + } + + "Ex 2B - Batch Insert Returning - whole row" in { + import `Ex 2 - Batch Insert Returning`._ + inline given InsertMeta[Product] = insertMeta(_.id) + inline def op = quote { + liftQuery(products).foreach(p => query[Product].insertValue(p).returning(p => p)) + } + val ids = testContext.run(op, batchSize) + ids mustEqual productsOriginal + testContext.run(get) mustEqual productsOriginal + } + + "Ex 3 - Batch Insert Mixed" in { + import `Ex 3 - Batch Insert Mixed`._ + inline def splicedOp = opExt(insert => sql"SET IDENTITY_INSERT Product ON; ${insert}".as[Insert[Product]]) + testContext.run(splicedOp, batchSize) + testContext.run(get) mustEqual result + } +} \ No newline at end of file diff --git a/quill-sql/src/main/scala/io/getquill/MirrorContext.scala b/quill-sql/src/main/scala/io/getquill/MirrorContext.scala index 77583eb8a..abf7c7bf4 100644 --- a/quill-sql/src/main/scala/io/getquill/MirrorContext.scala +++ b/quill-sql/src/main/scala/io/getquill/MirrorContext.scala @@ -66,9 +66,13 @@ trait MirrorContextBase[+Dialect <: Idiom, +Naming <: NamingStrategy] @targetName("runActionReturningMany") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ActionReturningMirror[T, List[T]] = 
InternalApi.runActionReturningMany(quoted).asInstanceOf[ActionReturningMirror[T, List[T]]] @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): BatchActionMirror = InternalApi.runBatchAction(quoted) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): BatchActionMirror = InternalApi.runBatchAction(quoted, rowsPerBatch) + @targetName("runBatchActionDefault") + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): BatchActionMirror = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): BatchActionReturningMirror[T] = InternalApi.runBatchActionReturning(quoted) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): BatchActionReturningMirror[T] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + @targetName("runBatchActionReturningDefault") + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): BatchActionReturningMirror[T] = InternalApi.runBatchActionReturning(quoted, 1) override def executeQuery[T](string: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner) = QueryMirror(string, prepare(Row(), session)._2, extractor, info) diff --git a/quill-sql/src/main/scala/io/getquill/context/Context.scala b/quill-sql/src/main/scala/io/getquill/context/Context.scala index 95b232f83..d3e8a147b 100644 --- a/quill-sql/src/main/scala/io/getquill/context/Context.scala +++ b/quill-sql/src/main/scala/io/getquill/context/Context.scala @@ -115,7 +115,7 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def runQuery[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): Result[RunQueryResult[T]] = { val ca = 
make.op[Nothing, T, Result[RunQueryResult[T]]] { arg => val simpleExt = arg.extractor.requireSimple() - self.executeQuery(arg.sql, arg.prepare.head, simpleExt.extract)(arg.executionInfo, _summonRunner()) + self.executeQuery(arg.sql, arg.prepare, simpleExt.extract)(arg.executionInfo, _summonRunner()) } QueryExecution.apply(ca)(quoted, None, wrap) } @@ -123,14 +123,14 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def runQuerySingle[T](inline quoted: Quoted[T]): Result[RunQuerySingleResult[T]] = { val ca = make.op[Nothing, T, Result[RunQuerySingleResult[T]]] { arg => val simpleExt = arg.extractor.requireSimple() - self.executeQuerySingle(arg.sql, arg.prepare.head, simpleExt.extract)(arg.executionInfo, _summonRunner()) + self.executeQuerySingle(arg.sql, arg.prepare, simpleExt.extract)(arg.executionInfo, _summonRunner()) } QueryExecution.apply(ca)(QuerySingleAsQuery(quoted), None) } inline def runAction[E](inline quoted: Quoted[Action[E]]): Result[RunActionResult] = { val ca = make.op[E, Any, Result[RunActionResult]] { arg => - self.executeAction(arg.sql, arg.prepare.head)(arg.executionInfo, _summonRunner()) + self.executeAction(arg.sql, arg.prepare)(arg.executionInfo, _summonRunner()) } QueryExecution.apply(ca)(quoted, None) } @@ -139,7 +139,7 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] val ca = make.op[E, T, Result[RunActionReturningResult[T]]] { arg => // Need an extractor with special information that helps with the SQL returning specifics val returningExt = arg.extractor.requireReturning() - self.executeActionReturning(arg.sql, arg.prepare.head, returningExt.extract, returningExt.returningBehavior)(arg.executionInfo, _summonRunner()) + self.executeActionReturning(arg.sql, arg.prepare, returningExt.extract, returningExt.returningBehavior)(arg.executionInfo, _summonRunner()) } QueryExecution.apply(ca)(quoted, None) } @@ -148,28 +148,28 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] val ca = make.op[E, T, 
Result[RunActionReturningResult[List[T]]]] { arg => // Need an extractor with special information that helps with the SQL returning specifics val returningExt = arg.extractor.requireReturning() - self.executeActionReturningMany(arg.sql, arg.prepare.head, returningExt.extract, returningExt.returningBehavior)(arg.executionInfo, _summonRunner()) + self.executeActionReturningMany(arg.sql, arg.prepare, returningExt.extract, returningExt.returningBehavior)(arg.executionInfo, _summonRunner()) } QueryExecution.apply(ca)(quoted, None) } - inline def runBatchAction[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Result[RunBatchActionResult] = { + inline def runBatchAction[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): Result[RunBatchActionResult] = { val ca = make.batch[I, Nothing, A, Result[RunBatchActionResult]] { arg => // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. - val group = BatchGroup(arg.sql, arg.prepare.toList) - self.executeBatchAction(List(group))(arg.executionInfo, _summonRunner()) + val groups = arg.groups.map((sql, prepare) => BatchGroup(sql, prepare)) + self.executeBatchAction(groups.toList)(arg.executionInfo, _summonRunner()) } - BatchQueryExecution.apply(ca)(quoted) + QueryExecutionBatch.apply(ca, rowsPerBatch)(quoted) } - inline def runBatchActionReturning[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): Result[RunBatchActionReturningResult[T]] = { + inline def runBatchActionReturning[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): Result[RunBatchActionReturningResult[T]] = { val ca = make.batch[I, T, A, Result[RunBatchActionReturningResult[T]]] { arg => val returningExt = arg.extractor.requireReturning() // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. 
- val group = BatchGroupReturning(arg.sql, returningExt.returningBehavior, arg.prepare.toList) - self.executeBatchActionReturning[T](List(group), returningExt.extract)(arg.executionInfo, _summonRunner()) + val groups = arg.groups.map((sql, prepare) => BatchGroupReturning(sql, returningExt.returningBehavior, prepare)) + self.executeBatchActionReturning[T](groups.toList, returningExt.extract)(arg.executionInfo, _summonRunner()) } - BatchQueryExecution.apply(ca)(quoted) + QueryExecutionBatch.apply(ca, rowsPerBatch)(quoted) } end InternalApi diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala b/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala index 9fa3378aa..53e37446a 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala @@ -69,7 +69,7 @@ trait ContextVerbPrepare[+Dialect <: Idiom, +Naming <: NamingStrategy]: @targetName("runPrepareQuery") inline def prepare[T](inline quoted: Quoted[Query[T]]): PrepareQueryResult = { val ca = make.op[Nothing, T, PrepareQueryResult] { arg => - self.prepareQuery(arg.sql, arg.prepare.head)(arg.executionInfo, _summonPrepareRunner()) + self.prepareQuery(arg.sql, arg.prepare)(arg.executionInfo, _summonPrepareRunner()) } QueryExecution.apply(ca)(quoted, None) } @@ -80,7 +80,7 @@ trait ContextVerbPrepare[+Dialect <: Idiom, +Naming <: NamingStrategy]: @targetName("runPrepareAction") inline def prepare[E](inline quoted: Quoted[Action[E]]): PrepareActionResult = { val ca = make.op[E, Any, PrepareActionResult] { arg => - self.prepareAction(arg.sql, arg.prepare.head)(arg.executionInfo, _summonPrepareRunner()) + self.prepareAction(arg.sql, arg.prepare)(arg.executionInfo, _summonPrepareRunner()) } QueryExecution.apply(ca)(quoted, None) } @@ -88,9 +88,9 @@ trait ContextVerbPrepare[+Dialect <: Idiom, +Naming <: NamingStrategy]: @targetName("runPrepareBatchAction") inline def prepare[I, A <: 
Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): PrepareBatchActionResult = { val ca = make.batch[I, Nothing, A, PrepareBatchActionResult] { arg => - val group = BatchGroup(arg.sql, arg.prepare.toList) - self.prepareBatchAction(List(group))(arg.executionInfo, _summonPrepareRunner()) + val groups = arg.groups.map((sql, prepare) => BatchGroup(sql, prepare)) + self.prepareBatchAction(groups.toList)(arg.executionInfo, _summonPrepareRunner()) } - BatchQueryExecution.apply(ca)(quoted) + QueryExecutionBatch.apply(ca, 1)(quoted) } end ContextVerbPrepare diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala b/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala index 7c42b8c0f..ba1cca69c 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala @@ -59,7 +59,7 @@ trait ContextVerbStream[+Dialect <: io.getquill.idiom.Idiom, +Naming <: NamingSt inline def _streamInternal[T](inline quoted: Quoted[Query[T]], fetchSize: Option[Int]): StreamResult[T] = { val ca = make.op[Nothing, T, StreamResult[T]] { arg => val simpleExt = arg.extractor.requireSimple() - self.streamQuery(arg.fetchSize, arg.sql, arg.prepare.head, simpleExt.extract)(arg.executionInfo, InternalApi._summonRunner()) + self.streamQuery(arg.fetchSize, arg.sql, arg.prepare, simpleExt.extract)(arg.executionInfo, InternalApi._summonRunner()) } QueryExecution.apply(ca)(quoted, fetchSize) } diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala b/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala index bfd2725e3..14cef6357 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala @@ -84,7 +84,7 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def translate[T](inline quoted: 
Quoted[Query[T]], inline prettyPrint: Boolean): TranslateResult[String] = { val ca = make.op[Nothing, T, TranslateResult[String]] { arg => val simpleExt = arg.extractor.requireSimple() - self.translateQueryEndpoint(arg.sql, arg.prepare.head, simpleExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, simpleExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) } QueryExecution.apply(ca)(quoted, None) } @@ -95,7 +95,7 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def translate[T](inline quoted: Quoted[T], inline prettyPrint: Boolean): TranslateResult[String] = { val ca = make.op[Nothing, T, TranslateResult[String]] { arg => val simpleExt = arg.extractor.requireSimple() - self.translateQueryEndpoint(arg.sql, arg.prepare.head, simpleExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, simpleExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) } QueryExecution.apply(ca)(QuerySingleAsQuery(quoted), None) } @@ -105,7 +105,7 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] @targetName("translateAction") inline def translate[E](inline quoted: Quoted[Action[E]], inline prettyPrint: Boolean): TranslateResult[String] = { val ca = make.op[E, Any, TranslateResult[String]] { arg => - self.translateQueryEndpoint(arg.sql, arg.prepare.head, prettyPrint = prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, prettyPrint = prettyPrint)(arg.executionInfo, _summonTranslateRunner()) } QueryExecution.apply(ca)(quoted, None) } @@ -116,7 +116,7 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def translate[E, T](inline quoted: Quoted[ActionReturning[E, T]], inline prettyPrint: Boolean): TranslateResult[String] = { val ca = make.op[E, T, TranslateResult[String]] { 
arg => val returningExt = arg.extractor.requireReturning() - self.translateQueryEndpoint(arg.sql, arg.prepare.head, returningExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, returningExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) } QueryExecution.apply(ca)(quoted, None) } @@ -127,10 +127,10 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def translate[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], inline prettyPrint: Boolean): TranslateResult[List[String]] = { val ca = make.batch[I, Nothing, A, TranslateResult[List[String]]] { arg => // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. - val group = BatchGroup(arg.sql, arg.prepare.toList) - self.translateBatchQueryEndpoint(List(group), prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + val groups = arg.groups.map((sql, prepare) => BatchGroup(sql, prepare)) + self.translateBatchQueryEndpoint(groups.toList, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) } - BatchQueryExecution.apply(ca)(quoted) + QueryExecutionBatch.apply(ca, 1)(quoted) } @targetName("translateBatchActionReturning") @@ -140,10 +140,10 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] val ca = make.batch[I, T, A, TranslateResult[List[String]]] { arg => val returningExt = arg.extractor.requireReturning() // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. 
- val group = BatchGroupReturning(arg.sql, returningExt.returningBehavior, arg.prepare.toList) - self.translateBatchQueryReturningEndpoint(List(group), prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + val groups = arg.groups.map((sql, prepare) => BatchGroupReturning(sql, returningExt.returningBehavior, prepare)) + self.translateBatchQueryReturningEndpoint(groups.toList, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) } - BatchQueryExecution.apply(ca)(quoted) + QueryExecutionBatch.apply(ca, 1)(quoted) } end ContextTranslateMacro diff --git a/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala b/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala index 4f3413843..cb137e072 100644 --- a/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala @@ -377,7 +377,7 @@ object InsertUpdateMacro { // we want to re-syntheize this as a lifted thing i.e. liftQuery(people).foreach(p => query[Person].insertValue(lift(p))) // and then reprocess the contents. // We don't want to do that here thought because we don't have the PrepareRow - // so we can't lift content here into planters. Instead this is done in the BatchQueryExecution pipeline + // so we can't lift content here into planters. Instead this is done in the QueryExecutionBatch pipeline case astIdent: AIdent => deduceAssignmentsFromIdent(astIdent) // Insertion could have lifts and quotes inside, need to extract those. 
diff --git a/quill-sql/src/main/scala/io/getquill/context/Particularize.scala b/quill-sql/src/main/scala/io/getquill/context/Particularize.scala index 6c52ce44c..4dafb508b 100644 --- a/quill-sql/src/main/scala/io/getquill/context/Particularize.scala +++ b/quill-sql/src/main/scala/io/getquill/context/Particularize.scala @@ -19,6 +19,8 @@ import scala.quoted._ import io.getquill.util.Format import io.getquill.metaprog.InjectableEagerPlanterExpr import io.getquill.parser.Lifter +import io.getquill.context.QueryExecutionBatchModel.SingleEntityLifts +import zio.Chunk /** * For a query that has a filter(p => liftQuery(List("Joe","Jack")).contains(p.name)) we need to turn @@ -57,6 +59,7 @@ object Particularize: case s: Statement => liftableStatement(s) case SetContainsToken(a, op, b) => '{ io.getquill.idiom.SetContainsToken(${ a.expr }, ${ op.expr }, ${ b.expr }) } case ScalarLiftToken(lift) => quotes.reflect.report.throwError("Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Lift Tokens.") + case ValuesClauseToken(stmt) => '{ io.getquill.idiom.ValuesClauseToken(${ stmt.expr }) } given liftableStatement: BasicLiftable[Statement] with def lift = @@ -66,11 +69,17 @@ object Particularize: // the following should test for that: update - extra lift + scalars + liftQuery/setContains object Static: /** Convenience constructor for doing particularization from an Unparticular.Query */ - def apply[PrepareRowTemp: Type](query: Unparticular.Query, lifts: List[Expr[Planter[_, _, _]]], runtimeLiftingPlaceholder: Expr[Int => String], emptySetContainsToken: Expr[Token => Token])(using Quotes): Expr[String] = + def apply[PrepareRowTemp: Type]( + query: Unparticular.Query, + lifts: List[Expr[Planter[_, _, _]]], + runtimeLiftingPlaceholder: Expr[Int => String], + emptySetContainsToken: Expr[Token => Token], + valuesClauseRepeats: Expr[Int] + )(using Quotes): Expr[String] = import quotes.reflect._ val liftsExpr: Expr[List[Planter[?, ?, ?]]] = Expr.ofList(lifts) val queryExpr: 
Expr[Unparticular.Query] = UnparticularQueryLiftable(query) - '{ Dynamic[PrepareRowTemp]($queryExpr, $liftsExpr, $runtimeLiftingPlaceholder, $emptySetContainsToken) } + '{ Dynamic[PrepareRowTemp]($queryExpr, $liftsExpr, $runtimeLiftingPlaceholder, $emptySetContainsToken)._1 } end Static object Dynamic: @@ -79,14 +88,16 @@ object Particularize: query: Unparticular.Query, lifts: List[Planter[_, _, _]], liftingPlaceholder: Int => String, - emptySetContainsToken: Token => Token - ): String = - raw(query.realQuery, lifts, liftingPlaceholder, emptySetContainsToken) + emptySetContainsToken: Token => Token, + valuesClauseRepeats: Int = 1 + ): (String, LiftsOrderer) = + raw(query.realQuery, lifts, liftingPlaceholder, emptySetContainsToken, valuesClauseRepeats) - private[getquill] def raw[PrepareRowTemp, Session](statements: Statement, lifts: List[Planter[_, _, _]], liftingPlaceholder: Int => String, emptySetContainsToken: Token => Token): String = { + private[getquill] def raw[PrepareRowTemp, Session](statements: Statement, lifts: List[Planter[_, _, _]], liftingPlaceholder: Int => String, emptySetContainsToken: Token => Token, valuesClauseRepeats: Int): (String, LiftsOrderer) = { enum LiftChoice: case ListLift(value: EagerListPlanter[Any, PrepareRowTemp, Session]) case SingleLift(value: Planter[Any, PrepareRowTemp, Session]) + case InjectableLift(value: Planter[Any, PrepareRowTemp, Session]) val listLifts = lifts.collect { case e: EagerListPlanter[_, _, _] => e.asInstanceOf[EagerListPlanter[Any, PrepareRowTemp, Session]] }.map(lift => (lift.uid, lift)).toMap val singleLifts = lifts.collect { case e: EagerPlanter[_, _, _] => e.asInstanceOf[EagerPlanter[Any, PrepareRowTemp, Session]] }.map(lift => (lift.uid, lift)).toMap @@ -95,60 +106,175 @@ object Particularize: def getLifts(uid: String): LiftChoice = listLifts.get(uid).map(LiftChoice.ListLift(_)) .orElse(singleLifts.get(uid).map(LiftChoice.SingleLift(_))) - .orElse(injectableLifts.get(uid).map(LiftChoice.SingleLift(_))) + 
.orElse(injectableLifts.get(uid).map(LiftChoice.InjectableLift(_))) .getOrElse { throw new IllegalArgumentException(s"Cannot find list-lift with UID ${uid} (from all the lifts ${lifts})") } // TODO Also need to account for empty tokens but since we actually have a reference to the list can do that directly - def placeholders(uid: String, initialIndex: Int): (Int, String) = - getLifts(uid) match + def placeholders(uid: String, initialIndex: Int): (Int, String, LiftChoice) = + val liftChoiceKind = getLifts(uid) + liftChoiceKind match case LiftChoice.ListLift(lifts) => // using index 1 since SQL prepares start with $1 typically val liftsPlaceholder = lifts.values.zipWithIndex.map((_, index) => liftingPlaceholder(index + initialIndex)).mkString(", ") val liftsLength = lifts.values.length - (liftsLength, liftsPlaceholder) + (liftsLength, liftsPlaceholder, liftChoiceKind) case LiftChoice.SingleLift(lift) => - (1, liftingPlaceholder(initialIndex)) + (1, liftingPlaceholder(initialIndex), liftChoiceKind) + case LiftChoice.InjectableLift(lift) => + (1, liftingPlaceholder(initialIndex), liftChoiceKind) def isEmptyListLift(uid: String) = getLifts(uid) match case LiftChoice.ListLift(lifts) => lifts.values.isEmpty case _ => false - def token2String(token: Token): String = { + trait Work + case class Item(token: io.getquill.idiom.Token) extends Work + case class SetValueClauseNum(num: Int) extends Work + case class DoneValueClauseNum(num: Int, isLast: Boolean) extends Work + + def token2String(token: io.getquill.idiom.Token): (String, LiftsOrderer) = { + // println(s"====== Tokenization for query: ${io.getquill.util.Messages.qprint(token)}") @tailrec def apply( - workList: List[Token], - sqlResult: Seq[String], - placeholderIndex: Int // I.e. the index of the '?' 
that is inserted in the query (that represents a lift) - ): String = workList match { - case Nil => sqlResult.reverse.foldLeft("")((concatonation, nextExpr) => concatonation + nextExpr) - case head :: tail => + workList: Chunk[Work], + sqlResult: Chunk[String], + lifts: Chunk[LiftSlot], + liftsCount: Int, // I.e. the index of the '?' that is inserted in the query (that represents a lift) + valueClausesIndex: Int + ): (String, LiftsOrderer) = { + // Completed all work + if (workList.isEmpty) { + val query = sqlResult.foldLeft("")((concatonation, nextExpr) => concatonation + nextExpr) + (query, LiftsOrderer(lifts.toList)) + } else { + val head = workList.head + val tail = workList.tail head match { - case StringToken(s2) => apply(tail, s2 +: sqlResult, placeholderIndex) - case SetContainsToken(a, op, b) => + case Item(StringToken(s2)) => apply(tail, sqlResult :+ s2, lifts, liftsCount, valueClausesIndex) + case Item(SetContainsToken(a, op, b)) => b match case ScalarTagToken(tag) if isEmptyListLift(tag.uid) => - apply(emptySetContainsToken(a) +: tail, sqlResult, placeholderIndex) + apply(Item(emptySetContainsToken(a)) +: tail, sqlResult, lifts, liftsCount, valueClausesIndex) case _ => - apply(stmt"$a $op ($b)" +: tail, sqlResult, placeholderIndex) - case ScalarTagToken(tag) => - val (liftsLength, lifts) = placeholders(tag.uid, placeholderIndex) - apply(tail, lifts +: sqlResult, placeholderIndex + liftsLength) - case Statement(tokens) => apply(tokens.foldRight(tail)(_ +: _), sqlResult, placeholderIndex) - case _: ScalarLiftToken => + apply(Item(stmt"$a $op ($b)") +: tail, sqlResult, lifts, liftsCount, valueClausesIndex) + case Item(ScalarTagToken(tag)) => + val (liftsLength, liftPlaceholders, liftChoice) = placeholders(tag.uid, liftsCount) + val newLift = + liftChoice match + case LiftChoice.InjectableLift(_) => + LiftSlot.makeNumbered(valueClausesIndex, tag) + case _ => + // println(s"Making Normal Lift ${tag.uid}") + LiftSlot.makePlain(tag) + + apply(tail, sqlResult 
:+ liftPlaceholders, lifts :+ newLift, liftsCount + liftsLength, valueClausesIndex) + case Item(ValuesClauseToken(stmt)) => + val repeatedClauses = + (0 until valuesClauseRepeats) + .toChunk + .mapWithHasNext((i, hasNext) => List(SetValueClauseNum(i), Item(stmt), DoneValueClauseNum(i, !hasNext))) + .flatten + + // println(s"=== Instructions for releated clauses: ${repeatedClauses}") + apply(repeatedClauses ++ tail, sqlResult, lifts, liftsCount, valueClausesIndex) + case Item(Statement(tokens)) => + apply(tokens.toChunk.map(Item(_)) ++ tail, sqlResult, lifts, liftsCount, valueClausesIndex) + case Item(_: ScalarLiftToken) => throw new UnsupportedOperationException("Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Lift Tokens.") - case _: QuotationTagToken => + case Item(_: QuotationTagToken) => throw new UnsupportedOperationException("Quotation Tags must be resolved before a reification.") + case SetValueClauseNum(num) => + // println(s"Setting value clause: ${num}") + apply(tail, sqlResult, lifts, liftsCount, num) + case DoneValueClauseNum(num, isLast) => + // println(s"Finished value clause: ${num}") + val reaminingWork = + if (!isLast) + Item(stmt", ") +: tail + else + tail + apply(reaminingWork, sqlResult, lifts, liftsCount, num) } + } } - apply(List(token), Seq(), 0) + apply(Chunk.single(Item(token)), Chunk.empty, Chunk.empty, 0, 0) } token2String(statements) } + + private implicit class IterableExtensions[A](list: Iterable[A]) extends AnyVal { + def toChunk[A] = Chunk.fromIterable(list) + } + // TODO Need to test + private implicit class ChunkExtensions[A](val as: Chunk[A]) extends AnyVal { + def mapWithHasNext[B](f: (A, Boolean) => B): Chunk[B] = { + val b = Chunk.newBuilder[B] + val it = as.iterator + if (it.hasNext) { + b += f(it.next(), it.hasNext) + while (it.hasNext) { + b += f(it.next(), it.hasNext) + } + } + b.result() + } + } end Dynamic + case class LiftSlot(rank: LiftSlot.Rank, external: ScalarTag) + object LiftSlot { + enum Rank: + case 
Numbered(num: Int) // for values-clauses + case Universal // for regular lifts + def makePlain(lift: ScalarTag) = LiftSlot(Rank.Universal, lift) + def makeNumbered(number: Int, lift: ScalarTag) = LiftSlot(Rank.Numbered(number), lift) + object Numbered: + def unapply(liftSlot: LiftSlot) = + liftSlot match + case LiftSlot(Rank.Numbered(num), ScalarTag(uid)) => Some((num, uid)) + case _ => None + object Plain: + def unapply(liftSlot: LiftSlot) = + liftSlot match + case LiftSlot(Rank.Universal, ScalarTag(uid)) => Some((uid)) + case _ => None + } + + case class LiftsOrderer(slots: List[LiftSlot]) { + case class ValueLiftKey(i: Int, uid: String) + def orderLifts(valueClauseLifts: List[SingleEntityLifts], regularLifts: List[Planter[?, ?, ?]]) = { + val valueClauseLiftIndexes = + valueClauseLifts + .zipWithIndex + .flatMap((entity, i) => + entity.lifts.map(lift => ValueLiftKey(i, lift.uid) -> lift) + ) + .toMap + val regularLiftIndexes = + regularLifts.map(lift => (lift.uid, lift)).toMap + + // println(s"===== Organizing into Lift Slots: ${slots}") + slots.map { + case LiftSlot.Numbered(valueClauseNum, uid) => + valueClauseLiftIndexes + .get(ValueLiftKey(valueClauseNum, uid)) + .getOrElse { + throw new IllegalStateException(s"Could not find the Value-Clause lift index:${valueClauseNum},uid:${uid}. Existing values are: ${valueClauseLiftIndexes}") + } + case LiftSlot.Plain(uid) => + regularLiftIndexes + .get(uid) + .getOrElse { + throw new IllegalStateException(s"Could not find the lift uid:${uid},uid:${uid}. 
Existing values are: ${regularLiftIndexes}") + } + case other => + throw new IllegalStateException(s"Illegal LiftSlot: ${other}") + } + } + } end Particularize diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala index 0ae757b39..9d3970774 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala @@ -51,24 +51,32 @@ import io.getquill.metaprog.SummonTranspileConfig import io.getquill.idiom.Token object ContextOperation: - case class Argument[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( + case class SingleArgument[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( sql: String, - prepare: Array[(PrepareRow, Session) => (List[Any], PrepareRow)], + prepare: (PrepareRow, Session) => (List[Any], PrepareRow), extractor: Extraction[ResultRow, Session, T], executionInfo: ExecutionInfo, fetchSize: Option[Int] ) + case class BatchArgument[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( + groups: List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])], + extractor: Extraction[ResultRow, Session, T], + executionInfo: ExecutionInfo, + fetchSize: Option[Int] + ) + case class Single[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res](val idiom: D, val naming: N)( + val execute: (ContextOperation.SingleArgument[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]) => Res + ) + case class Batch[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res](val idiom: D, val naming: N)( + val 
execute: (ContextOperation.BatchArgument[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]) => Res + ) case class Factory[D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _]](val idiom: D, val naming: N): def op[I, T, Res] = - ContextOperation[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res](idiom, naming) + ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res](idiom, naming) def batch[I, T, A <: QAC[I, T] with Action[I], Res] = - ContextOperation[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res](idiom, naming) + ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res](idiom, naming) -case class ContextOperation[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res](val idiom: D, val naming: N)( - val execute: (ContextOperation.Argument[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]) => Res -) - -/** Enums and helper methods for QueryExecution and BatchQueryExecution */ +/** Enums and helper methods for QueryExecution and QueryExecutionBatch */ object Execution: enum ExtractBehavior: @@ -167,7 +175,7 @@ object QueryExecution: Res: Type ]( quotedOp: Expr[Quoted[QAC[_, _]]], - contextOperation: Expr[ContextOperation[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], + contextOperation: Expr[ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], fetchSize: Expr[Option[Int]], wrap: Expr[OuterSelectWrap] )(using val qctx: Quotes, QAC: Type[QAC[_, _]]): @@ -297,12 +305,12 @@ object QueryExecution: val emptyContainsTokenExpr: Expr[Token => Token] = '{ $contextOperation.idiom.emptySetContainsToken(_) } val liftingPlaceholderExpr: Expr[Int => String] = '{ $contextOperation.idiom.liftingPlaceholder } - val particularQuery = Particularize.Static(state.query, lifts, liftingPlaceholderExpr, emptyContainsTokenExpr) + val 
particularQuery = Particularize.Static(state.query, lifts, liftingPlaceholderExpr, emptyContainsTokenExpr, '{ 1 }) // Plug in the components and execute val astSplice = if (TypeRepr.of[Ctx] <:< TypeRepr.of[AstSplicing]) Lifter(state.ast) else '{ io.getquill.ast.NullValue } - '{ $contextOperation.execute(ContextOperation.Argument($particularQuery, Array($prepare), $extractor, ExecutionInfo(ExecutionType.Static, $astSplice, ${ Lifter.quat(topLevelQuat) }), $fetchSize)) } + '{ $contextOperation.execute(ContextOperation.SingleArgument($particularQuery, $prepare, $extractor, ExecutionInfo(ExecutionType.Static, $astSplice, ${ Lifter.quat(topLevelQuat) }), $fetchSize)) } end executeStatic /** @@ -356,7 +364,7 @@ object QueryExecution: N <: NamingStrategy, Ctx <: Context[_, _], Res - ](ctx: ContextOperation[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res])(inline quotedOp: Quoted[QAC[_, _]], fetchSize: Option[Int], inline wrap: OuterSelectWrap = OuterSelectWrap.Default) = + ](ctx: ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res])(inline quotedOp: Quoted[QAC[_, _]], fetchSize: Option[Int], inline wrap: OuterSelectWrap = OuterSelectWrap.Default) = ${ applyImpl('quotedOp, 'ctx, 'fetchSize, 'wrap) } def applyImpl[ @@ -372,7 +380,7 @@ object QueryExecution: Res: Type ]( quotedOp: Expr[Quoted[QAC[_, _]]], - ctx: Expr[ContextOperation[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], + ctx: Expr[ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], fetchSize: Expr[Option[Int]], wrap: Expr[OuterSelectWrap] )(using qctx: Quotes): Expr[Res] = new RunQuery[I, T, ResultRow, PrepareRow, Session, D, N, Ctx, Res](quotedOp, ctx, fetchSize, wrap).apply() @@ -478,8 +486,7 @@ object PrepareDynamicExecution: case (Extraction.None, None) => rawExtractor case (extractor, returningAction) => throw new IllegalArgumentException(s"Invalid state. 
Cannot have ${extractor} with a returning action ${returningAction}") - // Turn the Tokenized AST into an actual string and pull out the ScalarTags (i.e. the lifts) - val (unparticularQuery, externals) = Unparticular.Query.fromStatement(stmt, idiom.liftingPlaceholder) + val (_, externals) = Unparticular.Query.fromStatement(stmt, idiom.liftingPlaceholder) // Get the UIDs from the lifts, if they are something unexpected (e.g. Lift elements from Quill 2.x) throw an exception val liftTags = @@ -488,8 +495,6 @@ object PrepareDynamicExecution: case other => throw new IllegalArgumentException(s"Invalid Lift Tag: ${other}") } - val queryString = Particularize.Dynamic(unparticularQuery, gatheredLifts ++ additionalLifts, idiom.liftingPlaceholder, idiom.emptySetContainsToken) - // Match the ScalarTags we pulled out earlier (in ReifyStatement) with corresponding Planters because // the Planters can be out of order (I.e. in a different order then the ?s in the SQL query that they need to be spliced into). // The ScalarTags are comming directly from the tokenized AST however and their order should be correct. 
@@ -505,7 +510,7 @@ object PrepareDynamicExecution: s"Due to an error: $msg" ) - (queryString, outputAst, sortedLifts, extractor, sortedSecondaryLifts) + (stmt, outputAst, sortedLifts, extractor, sortedSecondaryLifts) end apply @@ -619,7 +624,7 @@ object RunDynamicExecution: Res ]( quoted: Quoted[QAC[I, RawT]], - ctx: ContextOperation[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res], + ctx: ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res], rawExtractor: Extraction[ResultRow, Session, T], spliceAst: Boolean, fetchSize: Option[Int], @@ -628,15 +633,20 @@ object RunDynamicExecution: transpileConfig: TranspileConfig ): Res = { // println("===== Passed Ast: " + io.getquill.util.Messages.qprint(quoted.ast)) - val (queryString, outputAst, sortedLifts, extractor, _) = + val (stmt, outputAst, sortedLifts, extractor, sortedSecondaryLifts) = PrepareDynamicExecution[I, T, RawT, D, N, PrepareRow, ResultRow, Session](quoted, rawExtractor, ctx.idiom, ctx.naming, elaborationBehavior, topLevelQuat, transpileConfig) + // Turn the Tokenized AST into an actual string and pull out the ScalarTags (i.e. the lifts) + val (unparticularQuery, _) = Unparticular.Query.fromStatement(stmt, ctx.idiom.liftingPlaceholder) + // TODO don't really need lift-sorting in PrepareDynamicExecution anymore? 
Could use liftsOrderer to do that + val (queryString, _) = Particularize.Dynamic(unparticularQuery, sortedLifts ++ sortedSecondaryLifts, ctx.idiom.liftingPlaceholder, ctx.idiom.emptySetContainsToken) + // Use the sortedLifts to prepare the method that will prepare the SQL statement val prepare = (row: PrepareRow, session: Session) => LiftsExtractor.Dynamic[PrepareRow, Session](sortedLifts, row, session) // Exclute the SQL Statement val executionAst = if (spliceAst) outputAst else io.getquill.ast.NullValue - ctx.execute(ContextOperation.Argument(queryString, Array(prepare), extractor, ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), fetchSize)) + ctx.execute(ContextOperation.SingleArgument(queryString, prepare, extractor, ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), fetchSize)) } end RunDynamicExecution diff --git a/quill-sql/src/main/scala/io/getquill/context/BatchQueryExecution.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatch.scala similarity index 67% rename from quill-sql/src/main/scala/io/getquill/context/BatchQueryExecution.scala rename to quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatch.scala index 1f48416c4..7b2da3f2c 100644 --- a/quill-sql/src/main/scala/io/getquill/context/BatchQueryExecution.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatch.scala @@ -2,8 +2,6 @@ package io.getquill.context import scala.language.higherKinds import scala.language.experimental.macros -//import io.getquill.generic.Dsl -//import io.getquill.util.Messages.fail import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try @@ -60,23 +58,49 @@ import io.getquill.metaprog.SummonTranspileConfig import io.getquill.norm.TranspileConfig import io.getquill.metaprog.TranspileConfigLiftable import io.getquill.idiom.Token +import scala.annotation.nowarn private[getquill] enum BatchActionType: case Insert case Update case Delete +/** + * In some cases the action that 
goes inside the batch needs an infix. For example, for SQL server + * to be able to do batch inserts of rows with IDs you need to do something like: + * {{ + * liftQuery(products).foreach(p => + * sql"SET IDENTITY_INSERT Product ON; ${query[Product].insertValue(p)}".as[Insert[Int]]) + * }} + * In order to yield something like this: + * {{ + * SET IDENTITY_INSERT Product ON; INSERT INTO Product (id,description,sku) VALUES (?, ?, ?) + * }} + * Otherwise SQLServer will not let you insert the row because `IDENTITY_INSERT` will be off. + */ +object PossiblyInfixAction: + private def isTailAction(actionAst: Ast) = + actionAst.isInstanceOf[ast.Insert] || actionAst.isInstanceOf[ast.Update] || actionAst.isInstanceOf[ast.Delete] + private def hasOneAction(params: List[Ast]) = + params.filter(isTailAction(_)).length == 1 + def unapply(actionAst: ast.Ast): Option[Ast] = + actionAst match + case ast.Infix(parts, params, _, _, _) if (hasOneAction(params)) => + params.find(isTailAction(_)) + case _ if (isTailAction(actionAst)) => Some(actionAst) + case _ => None + private[getquill] object ActionEntity: def unapply(actionAst: Ast): Option[BatchActionType] = actionAst match - case ast.Insert(entity, _) => Some(BatchActionType.Insert) - case ast.Update(entity, assignments) => Some(BatchActionType.Update) - case ast.Delete(entity) => Some(BatchActionType.Delete) - case _ => None + case PossiblyInfixAction(ast.Insert(entity, _)) => Some(BatchActionType.Insert) + case PossiblyInfixAction(ast.Update(entity, assignments)) => Some(BatchActionType.Update) + case PossiblyInfixAction(ast.Delete(entity)) => Some(BatchActionType.Delete) + case _ => None object PrepareBatchComponents: import Execution._ - import BatchQueryExecutionModel._ + import QueryExecutionBatchModel._ def apply[I, PrepareRow](unliftedAst: Ast, foreachIdentAst: ast.Ast, extractionBehavior: BatchExtractBehavior): Either[String, (Ast, BatchActionType)] = { // putting this in a block since I don't want to externally import 
these packages @@ -147,7 +171,7 @@ object PrepareBatchComponents: } end PrepareBatchComponents -object BatchQueryExecutionModel: +object QueryExecutionBatchModel: import Execution._ type BatchExtractBehavior = ExtractBehavior.Skip.type | ExtractBehavior.ExtractWithReturnAction.type given ToExpr[BatchExtractBehavior] with @@ -155,168 +179,20 @@ object BatchQueryExecutionModel: behavior match case _: ExtractBehavior.Skip.type => '{ ExtractBehavior.Skip } case _: ExtractBehavior.ExtractWithReturnAction.type => '{ ExtractBehavior.ExtractWithReturnAction } - -object DynamicBatchQueryExecution: - import BatchQueryExecutionModel._ - import PrepareDynamicExecution._ - - extension [T](element: Either[String, T]) - def rightOrException() = - element match - case Right(value) => value - case Left(error) => throw new IllegalArgumentException(error) - - // NOTE We don't need to process secondary planters anymore because that list is not being used. - // It is handled by the static state. Can removing everything having to do with secondary planters list in a future PR. 
- sealed trait PlanterKind - object PlanterKind: - case class PrimaryEntitiesList(planter: EagerEntitiesPlanter[?, ?, ?]) extends PlanterKind - case class PrimaryScalarList(planter: EagerListPlanter[?, ?, ?]) extends PlanterKind - case class Other(planter: Planter[?, ?, ?]) extends PlanterKind - - def organizePlanters(planters: List[Planter[?, ?, ?]]) = - planters.foldLeft((Option.empty[PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList], List.empty[PlanterKind.Other])) { - case ((None, list), planter: EagerEntitiesPlanter[?, ?, ?]) => - val planterKind = PlanterKind.PrimaryEntitiesList(planter) - (Some(planterKind), list) - case ((None, list), planter: EagerListPlanter[?, ?, ?]) => - val planterKind = PlanterKind.PrimaryScalarList(planter) - (Some(planterKind), list) - case ((primary @ Some(_), list), planter) => - (primary, list :+ PlanterKind.Other(planter)) - // this means we haven't found the primary planter yet (don't think this can happen because nothing can be before liftQuery), keep going - case ((primary @ None, list), planter) => - throw new IllegalArgumentException("Invalid planter traversal") - } match { - case (Some(primary), categorizedPlanters) => (primary, categorizedPlanters) - case (None, _) => throw new IllegalArgumentException(s"Could not find an entities list-lift (i.e. 
liftQuery(entities/scalars) in liftQuery(...).foreach()) in lifts: ${planters}") - } - - def extractPrimaryComponents[I, PrepareRow, Session]( - primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, - ast: Ast, - extractionBehavior: BatchQueryExecutionModel.BatchExtractBehavior - ) = - primaryPlanter match - // In the case of liftQuery(entities) - case PlanterKind.PrimaryEntitiesList(planter) => - val (actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, planter.fieldClass, extractionBehavior).rightOrException() - (actionQueryAst, batchActionType, planter.fieldGetters.asInstanceOf[List[InjectableEagerPlanter[?, PrepareRow, Session]]]) - // In the case of liftQuery(scalars) - // Note, we could have potential other liftQuery(scalars) later in the query for example: - // liftQuery(List("Joe","Jack","Jill")).foreach(query[Person].filter(name => liftQuery(1,2,3 /*ids of Joe,Jack,Jill respectively*/).contains(p.id)).update(_.name -> name)) - // Therefore we cannot assume that there is only one - case PlanterKind.PrimaryScalarList(planter) => - val uuid = java.util.UUID.randomUUID.toString - val (foreachReplacementAst, perRowLift) = - (ScalarTag(uuid), InjectableEagerPlanter((t: Any) => t, planter.encoder.asInstanceOf[io.getquill.generic.GenericEncoder[Any, PrepareRow, Session]], uuid)) - // create the full batch-query Ast using the value of actual query of the batch statement i.e. 
I in: - // liftQuery[...](...).foreach(p => query[I].insertValue(p)) - val (actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, foreachReplacementAst, extractionBehavior).rightOrException() - // return the combined batch components - (actionQueryAst, batchActionType, List(perRowLift)) - - def apply[ - I, - T, - A <: QAC[I, T] & Action[I], - ResultRow, - PrepareRow, - Session, - D <: Idiom, - N <: NamingStrategy, - Ctx <: Context[_, _], - Res - ]( - quotedRaw: Quoted[BatchAction[A]], - batchContextOperation: ContextOperation[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res], - extractionBehavior: BatchExtractBehavior, - rawExtractor: Extraction[ResultRow, Session, T], - topLevelQuat: Quat, - transpileConfig: TranspileConfig - ) = { - // since real quotation could possibly be nested, need to get all splice all quotes and get all lifts in all runtimeQuote sections first - val ast = spliceQuotations(quotedRaw) - val lifts = gatherLifts(quotedRaw) - - // println(s"===== Spliced Ast: ====\n${io.getquill.util.Messages.qprint(ast)}") - // println(s"===== Initial Lifts: ====\n${io.getquill.util.Messages.qprint(lifts)}") - - // Given: Person(name, age) - // For the query: - // liftQuery(List(Person("Joe", 123))).foreach(p => query[Person].insertValue(p)) - // it would be (CaseClass(name->lift(A), age->lift(B)), BatchActionType.Insert, List(InjectableEagerLift(A), InjectableEagerLift(B)))) - // Same thing regardless of what kind of object is in the insert: - // liftQuery(List("foo")).foreach(name => query[Person].update(_.name -> name)) - // it would be (CaseClass(name->lift(A), age->lift(B)), BatchActionType.Update, List(InjectableEagerLift(A), InjectableEagerLift(B)))) - // - // That is why it is important to find the actual EagerEntitiesPlanterExpr (i.e. the part defined by `query[Person]`). That - // way we know the actual entity that needs to be lifted. 
- val (primaryPlanter, categorizedPlanters) = organizePlanters(lifts) - - // Use some custom functionality in the lift macro to prepare the case class an injectable lifts - // e.g. if T is Person(name: String, age: Int) and we do liftQuery(people:List[Person]).foreach(p => query[Person].insertValue(p)) - // Then: - // ast = CaseClass(name -> lift(UUID1), age -> lift(UUID2)) // NOTE: lift in the AST means a ScalarTag - // lifts = List(InjectableEagerLift(p.name, UUID1), InjectableEagerLift(p.age, UUID2)) - // e.g. if T is String and we do liftQuery(people:List[String]).foreach(p => query[Person].insertValue(Person(p, 123))) - // Then: - // ast = lift(UUID1) // I.e. ScalarTag(UUID1) since lift in the AST means a ScalarTag - // lifts = List(InjectableEagerLift(p, UUID1)) - val (actionQueryAst, batchActionType, perRowLifts) = extractPrimaryComponents[I, PrepareRow, Session](primaryPlanter, ast, extractionBehavior) - - // equivalent to static expandQuotation result - val dynamicExpandedQuotation = - batchActionType match - case BatchActionType.Insert => Quoted[Insert[I]](actionQueryAst, perRowLifts, Nil) // Already gathered queries and lifts from sub-clauses, don't need them anymore - case BatchActionType.Update => Quoted[Update[I]](actionQueryAst, perRowLifts, Nil) - // We need lifts for 'Delete' because it could have a WHERE clause - case BatchActionType.Delete => Quoted[Delete[I]](actionQueryAst, perRowLifts, Nil) - - val (queryString, outputAst, sortedLifts, extractor, sortedSecondaryLifts) = - PrepareDynamicExecution[I, T, T, D, N, PrepareRow, ResultRow, Session]( - dynamicExpandedQuotation, - rawExtractor, - batchContextOperation.idiom, - batchContextOperation.naming, - ElaborationBehavior.Skip, - topLevelQuat, - transpileConfig, - SpliceBehavior.AlreadySpliced, - categorizedPlanters.map(_.planter) - ) - - def expandLiftQueryMembers(filteredPerRowLifts: List[Planter[?, ?, ?]], entities: Iterable[?]) = - entities.map { entity => - 
sortedLifts.asInstanceOf[List[InjectableEagerPlanter[_, _, _]]].map(lift => lift.withInject(entity)) - } - - // Get the planters needed for every element lift (see primaryPlanterLifts in BatchStatic for more detail) - val primaryPlanterLifts = - primaryPlanter match - case PlanterKind.PrimaryEntitiesList(entitiesPlanter) => - expandLiftQueryMembers(sortedLifts, entitiesPlanter.value).toList - case PlanterKind.PrimaryScalarList(scalarsPlanter) => - expandLiftQueryMembers(sortedLifts, scalarsPlanter.values).toList - - // Get other lifts that are needed (again, see primaryPlanterLifts in BatchStatic for more detail). Then combine them - val combinedPlanters = - primaryPlanterLifts.map(perEntityPlanters => perEntityPlanters ++ sortedSecondaryLifts) - - val prepares = - combinedPlanters.map(perRowLifts => - (row: PrepareRow, session: Session) => - LiftsExtractor.Dynamic[PrepareRow, Session](perRowLifts, row, session) - ) - - val spliceAst = false - val executionAst = if (spliceAst) outputAst else io.getquill.ast.NullValue - batchContextOperation.execute(ContextOperation.Argument(queryString, prepares.toArray, extractor, ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), None)) - } - -object BatchQueryExecution: + case class SingleEntityLifts(lifts: List[Planter[?, ?, ?]]) + + enum BatchingBehavior: + // Normal behavior for most databases/contexts + case SingleRowPerBatch + // Postgres/SQLServer/H2, etc.. support multiple-row-per-query inserts via VALUES clauses + // this is a significant optimization over JDBC's PreparedStatement.addBatch/executeBatch. + // (The latter which usually don't amount to much better over just single-row actions.) 
+ case MultiRowsPerBatch(numRows: Int) +end QueryExecutionBatchModel + +object QueryExecutionBatch: import Execution._ - import BatchQueryExecutionModel.{_, given} + import QueryExecutionBatchModel.{_, given} private[getquill] class RunQuery[ I: Type, @@ -329,11 +205,21 @@ object BatchQueryExecution: N <: NamingStrategy: Type, Ctx <: Context[_, _], Res: Type - ](quotedRaw: Expr[Quoted[BatchAction[A]]], batchContextOperation: Expr[ContextOperation[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]])(using Quotes, Type[Ctx]): + ](quotedRaw: Expr[Quoted[BatchAction[A]]], batchContextOperation: Expr[ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], rowsPerQuery: Expr[Int])(using Quotes, Type[Ctx]): import quotes.reflect._ val topLevelQuat = QuatMaking.ofType[T] + // TODO Verify batching is >0 + lazy val batchingBehavior = '{ + // Do a widening to `Int` otherwise when 1 is passed into the rowsPerQuery argument + // scala things that it's a constant value hence it raises as "Error Unreachable" + // for the 2nd part of this case-match. 
+ ($rowsPerQuery: Int) match + case 1 => BatchingBehavior.SingleRowPerBatch + case other => BatchingBehavior.MultiRowsPerBatch(other) + } + def extractionBehavior: BatchExtractBehavior = Type.of[A] match case '[QAC[I, Nothing]] => ExtractBehavior.Skip @@ -370,14 +256,15 @@ object BatchQueryExecution: val extractor = MakeExtractor[ResultRow, Session, T, T].dynamic(identityConverter, extractionBehavior) val transpileConfig = SummonTranspileConfig() '{ - DynamicBatchQueryExecution.apply[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res]( + QueryExecutionBatchDynamic.apply[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res]( $quotedRaw, $batchContextOperation, $extractionBehaviorExpr, $extractor, // / For the sake of viewing/debugging the quat macro code it is better not to serialize it here ${ Lifter.NotSerializing.quat(topLevelQuat) }, - ${ TranspileConfigLiftable(transpileConfig) } + ${ TranspileConfigLiftable(transpileConfig) }, + $batchingBehavior ) } @@ -394,7 +281,7 @@ object BatchQueryExecution: val comps = BatchStatic[I, PrepareRow, Session](unliftedAst, planters, extractionBehavior) val expandedQuotation = expandQuotation(comps.actionQueryAst, comps.batchActionType, comps.perRowLifts) - def expandLiftQueryMembers(filteredPerRowLifts: List[PlanterExpr[?, ?, ?]], entities: Expr[Iterable[?]]) = + def expandLiftQueryMembers(filteredPerRowLifts: List[InjectableEagerPlanterExpr[?, ?, ?]], entities: Expr[Iterable[?]]) = '{ $entities.map(entity => ${ @@ -408,7 +295,7 @@ object BatchQueryExecution: // we need a pre-filtered, and ordered list of lifts. The StaticTranslationMacro interanally has done that so we can take the lifts from there although they need to be casted. 
// This is safe because they are just the lifts taht we have already had from the `injectableLifts` list // TODO If all the lists are not InjectableEagerPlanterExpr, then we need to find out which ones are not and not inject them - val injectedLifts = filteredPerRowLifts.asInstanceOf[List[InjectableEagerPlanterExpr[_, _, _]]].map(lift => lift.inject('entity)) + val injectedLifts = filteredPerRowLifts.map(lift => lift.inject('entity)) val injectedLiftsExpr = Expr.ofList(injectedLifts) // val prepare = '{ (row: PrepareRow, session: Session) => LiftsExtractor.apply[PrepareRow, Session]($injectedLiftsExpr, row, session) } // prepare @@ -418,8 +305,9 @@ object BatchQueryExecution: } StaticTranslationMacro[D, N](expandedQuotation, ElaborationBehavior.Skip, topLevelQuat, comps.categorizedPlanters.map(_.planter)) match - case Some(state @ StaticState(query, filteredPerRowLifts, _, _, secondaryLifts)) => + case Some(state @ StaticState(query, filteredPerRowLiftsRaw, _, _, secondaryLifts)) => // create an extractor for returning actions + val filteredPerRowLifts = filteredPerRowLiftsRaw.asInstanceOf[List[InjectableEagerPlanterExpr[_, _, _]]] val extractor = MakeExtractor[ResultRow, Session, T, T].static(state, identityConverter, extractionBehavior) // In an expression we could have a whole bunch of different lifts @@ -439,11 +327,11 @@ object BatchQueryExecution: comps.primaryPlanter match case BatchStatic.PlanterKind.PrimaryEntitiesList(entitiesPlanter) => val exp = expandLiftQueryMembers(filteredPerRowLifts, entitiesPlanter.expr) - '{ $exp.toList } + '{ $exp.map(SingleEntityLifts(_)).toList } case BatchStatic.PlanterKind.PrimaryScalarList(scalarsPlanter) => val exp = expandLiftQueryMembers(filteredPerRowLifts, scalarsPlanter.expr) - '{ $exp.toList } + '{ $exp.map(SingleEntityLifts(_)).toList } // At this point here is waht the lifts look like: // List( @@ -455,29 +343,40 @@ object BatchQueryExecution: // List(lift(Joe.name), lift(Joe.age)), lift(somethingElse) <- 
per-entity lifts of Joe // List(lift(Jim.name), lift(Jim.age)), lift(somethingElse) <- per-entity lifts of Jim // ) - val otherPlanters = - Expr.ofList(secondaryLifts.map(_.plant)) - val combinedPlanters = - '{ $primaryPlanterLifts.map(perEntityPlanters => perEntityPlanters ++ $otherPlanters) } + + // case class SingleQueryPlanters() + + val otherPlanters = Expr.ofList(secondaryLifts.map(_.plant)) // println(s"============= Other Planters ===========\n${Format.Expr(otherPlanters)} ") // println(s"============= Combined Planters ===========\n${Format.Expr(combinedPlanters)} ") - val prepares = '{ - $combinedPlanters.map(perRowList => - (row: PrepareRow, session: Session) => - LiftsExtractor.apply[PrepareRow, Session](perRowList, row, session) - ) - } - val allPlanterExprs = (filteredPerRowLifts ++ secondaryLifts).map(_.plant) + val originalPlantersExpr = Expr.ofList(filteredPerRowLifts.map(_.plant)) val emptyContainsTokenExpr: Expr[Token => Token] = '{ $batchContextOperation.idiom.emptySetContainsToken(_) } val liftingPlaceholderExpr: Expr[Int => String] = '{ $batchContextOperation.idiom.liftingPlaceholder } - val particularQuery = Particularize.Static[PrepareRow](state.query, allPlanterExprs, liftingPlaceholderExpr, emptyContainsTokenExpr) + val queryExpr = Particularize.UnparticularQueryLiftable(state.query) + + import QueryExecutionBatchModel.{_, given} + val extractionBehaviorExpr = Expr(extractionBehavior) + + val batchGroups = '{ + QueryExecutionBatchIteration[PrepareRow, Session]( + $batchContextOperation.idiom, + $queryExpr, + $primaryPlanterLifts, + $otherPlanters, + $originalPlantersExpr, + $liftingPlaceholderExpr, + $emptyContainsTokenExpr, + $batchingBehavior, + $extractionBehaviorExpr + ) + } '{ - $batchContextOperation.execute(ContextOperation.Argument($particularQuery, $prepares.toArray, $extractor, ExecutionInfo(ExecutionType.Static, ${ Lifter(state.ast) }, ${ Lifter.quat(topLevelQuat) }), None)) + 
$batchContextOperation.execute(ContextOperation.BatchArgument($batchGroups, $extractor, ExecutionInfo(ExecutionType.Static, ${ Lifter(state.ast) }, ${ Lifter.quat(topLevelQuat) }), None)) } case None => @@ -509,8 +408,8 @@ object BatchQueryExecution: N <: NamingStrategy, Ctx <: Context[_, _], Res - ](ctx: ContextOperation[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res])(inline quoted: Quoted[BatchAction[A]]) = - ${ applyImpl[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res]('quoted, 'ctx) } + ](ctx: ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res], rowsPerQuery: Int)(inline quoted: Quoted[BatchAction[A]]) = + ${ applyImpl[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res]('quoted, 'ctx, 'rowsPerQuery) } def applyImpl[ I: Type, @@ -523,10 +422,10 @@ object BatchQueryExecution: N <: NamingStrategy: Type, Ctx <: Context[_, _], Res: Type - ](quoted: Expr[Quoted[BatchAction[A]]], ctx: Expr[ContextOperation[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]])(using Quotes, Type[Ctx]): Expr[Res] = - new RunQuery[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res](quoted, ctx).apply() + ](quoted: Expr[Quoted[BatchAction[A]]], ctx: Expr[ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], rowsPerQuery: Expr[Int])(using Quotes, Type[Ctx]): Expr[Res] = + new RunQuery[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res](quoted, ctx, rowsPerQuery).apply() -end BatchQueryExecution +end QueryExecutionBatch object BatchStatic: case class Components[PrepareRow, Session]( @@ -565,7 +464,7 @@ object BatchStatic: def extractPrimaryComponents[I: Type, PrepareRow: Type, Session: Type]( primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, ast: Ast, - extractionBehavior: BatchQueryExecutionModel.BatchExtractBehavior + extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior )(using Quotes) = primaryPlanter match // In the case of liftQuery(entities) @@ 
-588,7 +487,7 @@ object BatchStatic: // return the combined batch components (Lifter(actionQueryAst), batchActionType, Expr.ofList(List(perRowLift))) - def apply[I: Type, PrepareRow: Type, Session: Type](ast: Ast, planters: List[PlanterExpr[?, ?, ?]], extractionBehavior: BatchQueryExecutionModel.BatchExtractBehavior)(using Quotes) = + def apply[I: Type, PrepareRow: Type, Session: Type](ast: Ast, planters: List[PlanterExpr[?, ?, ?]], extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior)(using Quotes) = import quotes.reflect._ // Given: Person(name, age) diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchDynamic.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchDynamic.scala new file mode 100644 index 000000000..d2c70b7bc --- /dev/null +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchDynamic.scala @@ -0,0 +1,227 @@ +package io.getquill.context + +import scala.language.higherKinds +import scala.language.experimental.macros +import java.io.Closeable +import scala.compiletime.summonFrom +import scala.util.Try +import io.getquill.{ReturnAction} +import io.getquill.generic.EncodingDsl +import io.getquill.Quoted +import io.getquill.QueryMeta +import io.getquill.generic._ +import io.getquill.context.mirror.MirrorDecoders +import io.getquill.context.mirror.Row +import io.getquill.generic.GenericDecoder +import io.getquill.generic.GenericEncoder +import io.getquill.Planter +import io.getquill.EagerPlanter +import io.getquill.InjectableEagerPlanter +import io.getquill.LazyPlanter +import io.getquill.ast.Ast +import io.getquill.ast.Filter +import io.getquill.ast.Entity +import io.getquill.ast.ScalarTag +import io.getquill.ast.Returning +import io.getquill.ast.ReturningGenerated +import io.getquill.ast +import scala.quoted._ +import io.getquill.ast.{Transform, QuotationTag} +import io.getquill.QuotationLot +import io.getquill.metaprog.QuotedExpr +import io.getquill.metaprog.PlanterExpr +import 
io.getquill.metaprog.EagerEntitiesPlanterExpr +import io.getquill.Planter +import io.getquill.idiom.ReifyStatement +import io.getquill.Query +import io.getquill.Action +import io.getquill.idiom.Idiom +import io.getquill.NamingStrategy +import io.getquill.metaprog.Extractors._ +import io.getquill.BatchAction +import io.getquill.metaprog.QuotationLotExpr +import io.getquill.metaprog.QuotationLotExpr._ +import io.getquill.util.Format +import io.getquill.context.LiftMacro +import io.getquill.parser.Unlifter +import io.getquill._ +import io.getquill.QAC +import io.getquill.parser.Lifter +import io.getquill.metaprog.InjectableEagerPlanterExpr +import _root_.io.getquill.norm.BetaReduction +import io.getquill.context.Execution.ElaborationBehavior +import io.getquill.quat.Quat +import io.getquill.quat.QuatMaking +import io.getquill.metaprog.EagerListPlanterExpr +import io.getquill.metaprog.EagerPlanterExpr +import io.getquill.metaprog.SummonTranspileConfig +import io.getquill.norm.TranspileConfig +import io.getquill.metaprog.TranspileConfigLiftable +import io.getquill.idiom.Token + +object QueryExecutionBatchDynamic: + import QueryExecutionBatchModel._ + import PrepareDynamicExecution._ + + extension [T](element: Either[String, T]) + def rightOrException() = + element match + case Right(value) => value + case Left(error) => throw new IllegalArgumentException(error) + + // NOTE We don't need to process secondary planters anymore because that list is not being used. + // It is handled by the static state. Can removing everything having to do with secondary planters list in a future PR. 
+ sealed trait PlanterKind + object PlanterKind: + case class PrimaryEntitiesList(planter: EagerEntitiesPlanter[?, ?, ?]) extends PlanterKind + case class PrimaryScalarList(planter: EagerListPlanter[?, ?, ?]) extends PlanterKind + case class Other(planter: Planter[?, ?, ?]) extends PlanterKind + + def organizePlanters(planters: List[Planter[?, ?, ?]]) = + planters.foldLeft((Option.empty[PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList], List.empty[PlanterKind.Other])) { + case ((None, list), planter: EagerEntitiesPlanter[?, ?, ?]) => + val planterKind = PlanterKind.PrimaryEntitiesList(planter) + (Some(planterKind), list) + case ((None, list), planter: EagerListPlanter[?, ?, ?]) => + val planterKind = PlanterKind.PrimaryScalarList(planter) + (Some(planterKind), list) + case ((primary @ Some(_), list), planter) => + (primary, list :+ PlanterKind.Other(planter)) + // this means we haven't found the primary planter yet (don't think this can happen because nothing can be before liftQuery), keep going + case ((primary @ None, list), planter) => + throw new IllegalArgumentException("Invalid planter traversal") + } match { + case (Some(primary), categorizedPlanters) => (primary, categorizedPlanters) + case (None, _) => throw new IllegalArgumentException(s"Could not find an entities list-lift (i.e. 
liftQuery(entities/scalars) in liftQuery(...).foreach()) in lifts: ${planters}") + } + + def extractPrimaryComponents[I, PrepareRow, Session]( + primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, + ast: Ast, + extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior + ) = + primaryPlanter match + // In the case of liftQuery(entities) + case PlanterKind.PrimaryEntitiesList(planter) => + val (actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, planter.fieldClass, extractionBehavior).rightOrException() + (actionQueryAst, batchActionType, planter.fieldGetters.asInstanceOf[List[InjectableEagerPlanter[?, PrepareRow, Session]]]) + // In the case of liftQuery(scalars) + // Note, we could have potential other liftQuery(scalars) later in the query for example: + // liftQuery(List("Joe","Jack","Jill")).foreach(query[Person].filter(name => liftQuery(1,2,3 /*ids of Joe,Jack,Jill respectively*/).contains(p.id)).update(_.name -> name)) + // Therefore we cannot assume that there is only one + case PlanterKind.PrimaryScalarList(planter) => + val uuid = java.util.UUID.randomUUID.toString + val (foreachReplacementAst, perRowLift) = + (ScalarTag(uuid), InjectableEagerPlanter((t: Any) => t, planter.encoder.asInstanceOf[io.getquill.generic.GenericEncoder[Any, PrepareRow, Session]], uuid)) + // create the full batch-query Ast using the value of actual query of the batch statement i.e. 
I in: + // liftQuery[...](...).foreach(p => query[I].insertValue(p)) + val (actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, foreachReplacementAst, extractionBehavior).rightOrException() + // return the combined batch components + (actionQueryAst, batchActionType, List(perRowLift)) + + def apply[ + I, + T, + A <: QAC[I, T] & Action[I], + ResultRow, + PrepareRow, + Session, + D <: Idiom, + N <: NamingStrategy, + Ctx <: Context[_, _], + Res + ]( + quotedRaw: Quoted[BatchAction[A]], + batchContextOperation: ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res], + extractionBehavior: BatchExtractBehavior, + rawExtractor: Extraction[ResultRow, Session, T], + topLevelQuat: Quat, + transpileConfig: TranspileConfig, + batchingBehavior: BatchingBehavior + ) = { + // since real quotation could possibly be nested, need to get all splice all quotes and get all lifts in all runtimeQuote sections first + val ast = spliceQuotations(quotedRaw) + val lifts = gatherLifts(quotedRaw) + val idiom = batchContextOperation.idiom + val naming = batchContextOperation.naming + + // println(s"===== Spliced Ast: ====\n${io.getquill.util.Messages.qprint(ast)}") + // println(s"===== Initial Lifts: ====\n${io.getquill.util.Messages.qprint(lifts)}") + + // Given: Person(name, age) + // For the query: + // liftQuery(List(Person("Joe", 123))).foreach(p => query[Person].insertValue(p)) + // it would be (CaseClass(name->lift(A), age->lift(B)), BatchActionType.Insert, List(InjectableEagerLift(A), InjectableEagerLift(B)))) + // Same thing regardless of what kind of object is in the insert: + // liftQuery(List("foo")).foreach(name => query[Person].update(_.name -> name)) + // it would be (CaseClass(name->lift(A), age->lift(B)), BatchActionType.Update, List(InjectableEagerLift(A), InjectableEagerLift(B)))) + // + // That is why it is important to find the actual EagerEntitiesPlanterExpr (i.e. the part defined by `query[Person]`). 
That + // way we know the actual entity that needs to be lifted. + val (primaryPlanter, categorizedPlanters) = organizePlanters(lifts) + + // Use some custom functionality in the lift macro to prepare the case class an injectable lifts + // e.g. if T is Person(name: String, age: Int) and we do liftQuery(people:List[Person]).foreach(p => query[Person].insertValue(p)) + // Then: + // ast = CaseClass(name -> lift(UUID1), age -> lift(UUID2)) // NOTE: lift in the AST means a ScalarTag + // lifts = List(InjectableEagerLift(p.name, UUID1), InjectableEagerLift(p.age, UUID2)) + // e.g. if T is String and we do liftQuery(people:List[String]).foreach(p => query[Person].insertValue(Person(p, 123))) + // Then: + // ast = lift(UUID1) // I.e. ScalarTag(UUID1) since lift in the AST means a ScalarTag + // lifts = List(InjectableEagerLift(p, UUID1)) + val (actionQueryAst, batchActionType, perRowLifts) = extractPrimaryComponents[I, PrepareRow, Session](primaryPlanter, ast, extractionBehavior) + + // equivalent to static expandQuotation result + val dynamicExpandedQuotation = + batchActionType match + case BatchActionType.Insert => Quoted[Insert[I]](actionQueryAst, perRowLifts, Nil) // Already gathered queries and lifts from sub-clauses, don't need them anymore + case BatchActionType.Update => Quoted[Update[I]](actionQueryAst, perRowLifts, Nil) + // We need lifts for 'Delete' because it could have a WHERE clause + case BatchActionType.Delete => Quoted[Delete[I]](actionQueryAst, perRowLifts, Nil) + + val (stmt, outputAst, sortedLiftsRaw, extractor, sortedSecondaryLifts) = + PrepareDynamicExecution[I, T, T, D, N, PrepareRow, ResultRow, Session]( + dynamicExpandedQuotation, + rawExtractor, + idiom, + naming, + ElaborationBehavior.Skip, + topLevelQuat, + transpileConfig, + SpliceBehavior.AlreadySpliced, + categorizedPlanters.map(_.planter) + ) + + val sortedLifts = sortedLiftsRaw.asInstanceOf[List[InjectableEagerPlanter[_, _, _]]] + + def expandLiftQueryMembers(filteredPerRowLifts: 
List[Planter[?, ?, ?]], entities: Iterable[?]) = + entities.map { entity => + SingleEntityLifts(sortedLifts.map(lift => lift.withInject(entity))) + } + + // Get the planters needed for every element lift (see primaryPlanterLifts in BatchStatic for more detail) + val primaryPlanterLifts = + primaryPlanter match + case PlanterKind.PrimaryEntitiesList(entitiesPlanter) => + expandLiftQueryMembers(sortedLifts, entitiesPlanter.value).toList + case PlanterKind.PrimaryScalarList(scalarsPlanter) => + expandLiftQueryMembers(sortedLifts, scalarsPlanter.values).toList + + val (unparticularQuery, _) = Unparticular.Query.fromStatement(stmt, batchContextOperation.idiom.liftingPlaceholder) + + val batchGroups = + QueryExecutionBatchIteration[PrepareRow, Session]( + batchContextOperation.idiom, + unparticularQuery, + primaryPlanterLifts, + sortedSecondaryLifts, + sortedLifts, + idiom.liftingPlaceholder, + idiom.emptySetContainsToken, + batchingBehavior, + extractionBehavior + ) + val spliceAst = false + val executionAst = if (spliceAst) outputAst else io.getquill.ast.NullValue + batchContextOperation.execute(ContextOperation.BatchArgument(batchGroups, extractor, ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), None)) + } diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchIteration.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchIteration.scala new file mode 100644 index 000000000..f028998fe --- /dev/null +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchIteration.scala @@ -0,0 +1,310 @@ +package io.getquill.context + +import scala.language.higherKinds +import scala.language.experimental.macros +import java.io.Closeable +import scala.compiletime.summonFrom +import scala.util.Try +import io.getquill.{ReturnAction} +import io.getquill.generic.EncodingDsl +import io.getquill.Quoted +import io.getquill.QueryMeta +import io.getquill.generic._ +import io.getquill.context.mirror.MirrorDecoders +import 
io.getquill.context.mirror.Row +import io.getquill.generic.GenericDecoder +import io.getquill.generic.GenericEncoder +import io.getquill.Planter +import io.getquill.EagerPlanter +import io.getquill.InjectableEagerPlanter +import io.getquill.LazyPlanter +import io.getquill.ast.Ast +import io.getquill.ast.Filter +import io.getquill.ast.Entity +import io.getquill.ast.ScalarTag +import io.getquill.ast.Returning +import io.getquill.ast.ReturningGenerated +import io.getquill.ast +import scala.quoted._ +import io.getquill.ast.{Transform, QuotationTag} +import io.getquill.QuotationLot +import io.getquill.metaprog.QuotedExpr +import io.getquill.metaprog.PlanterExpr +import io.getquill.metaprog.EagerEntitiesPlanterExpr +import io.getquill.Planter +import io.getquill.idiom.ReifyStatement +import io.getquill.Query +import io.getquill.Action +import io.getquill.idiom.Idiom +import io.getquill.NamingStrategy +import io.getquill.metaprog.Extractors._ +import io.getquill.BatchAction +import io.getquill.metaprog.QuotationLotExpr +import io.getquill.metaprog.QuotationLotExpr._ +import io.getquill.util.Format +import io.getquill.context.LiftMacro +import io.getquill.parser.Unlifter +import io.getquill._ +import io.getquill.QAC +import io.getquill.parser.Lifter +import io.getquill.metaprog.InjectableEagerPlanterExpr +import _root_.io.getquill.norm.BetaReduction +import io.getquill.context.Execution.ElaborationBehavior +import io.getquill.quat.Quat +import io.getquill.quat.QuatMaking +import io.getquill.metaprog.EagerListPlanterExpr +import io.getquill.metaprog.EagerPlanterExpr +import io.getquill.metaprog.SummonTranspileConfig +import io.getquill.norm.TranspileConfig +import io.getquill.metaprog.TranspileConfigLiftable +import io.getquill.idiom.Token +import io.getquill.context.QueryExecutionBatchModel.SingleEntityLifts +import io.getquill.context.QueryExecutionBatchModel.BatchingBehavior +import io.getquill.context.QueryExecutionBatchModel.BatchExtractBehavior +import 
scala.util.Right +import scala.util.Left +import com.typesafe.scalalogging.Logger +import io.getquill.util.ContextLogger +import io.getquill.context.Execution.ExtractBehavior + +object QueryExecutionBatchIteration { + + private[getquill] val logger = ContextLogger(classOf[QueryExecutionBatchIteration.type]) + + def validateIdiomSupportsConcatenatedIteration(idiom: Idiom, extractBehavior: BatchExtractBehavior) = + extractBehavior match + case ExtractBehavior.Skip => + validateIdiomSupportsConcatenatedIterationNormal(idiom) + case ExtractBehavior.ExtractWithReturnAction => + validateIdiomSupportsConcatenatedIterationReturning(idiom) + + def validateIdiomSupportsConcatenatedIterationNormal(idiom: Idiom) = { + import io.getquill.context.InsertValueMulti + val hasCapability = + if (idiom.isInstanceOf[IdiomInsertValueCapability]) + idiom.asInstanceOf[IdiomInsertValueCapability].idiomInsertValuesCapability == InsertValueMulti + else + false + + if (hasCapability) + Right(()) + else + Left( + s"""|The dialect ${idiom.getClass.getName} does not support inserting multiple rows-per-batch (e.g. it cannot support multiple VALUES clauses). + |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server, Sqlite). + |Falling back to the regular single-row-per-batch insert behavior. + |""".stripMargin + ) + } + + def validateIdiomSupportsConcatenatedIterationReturning(idiom: Idiom) = { + import io.getquill.context.InsertValueMulti + val hasCapability = + if (idiom.isInstanceOf[IdiomInsertReturningValueCapability]) + idiom.asInstanceOf[IdiomInsertReturningValueCapability].idiomInsertReturningValuesCapability == InsertReturningValueMulti + else + false + + if (hasCapability) + Right(()) + else + Left( + s"""|The dialect ${idiom.getClass.getName} does not support inserting multiple rows-per-batch (e.g. it cannot support multiple VALUES clauses) + |when batching with query-returns and/or generated-keys. 
+ |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server). + |Falling back to the regular single-row-per-batch insert-returning behavior. + |""".stripMargin + ) + } + + def validateConcatenatedIterationPossible(query: Unparticular.Query, entitiesPerQuery: Int) = { + import io.getquill.idiom._ + def valueClauseExistsIn(token: Token): Boolean = + token match + case _: ValuesClauseToken => true + case _: StringToken => false + case _: ScalarTagToken => false + case _: QuotationTagToken => false + case _: ScalarLiftToken => false + case Statement(tokens: List[Token]) => tokens.exists(valueClauseExistsIn(_) == true) + case SetContainsToken(a: Token, op: Token, b: Token) => + valueClauseExistsIn(a) || valueClauseExistsIn(op) || valueClauseExistsIn(b) + + if (valueClauseExistsIn(query.realQuery)) + Right(()) + else + Left( + s"""|Cannot insert multiple (i.e. ${entitiesPerQuery}) rows per-batch-query since the query ${query.basicQuery} has no VALUES clause. + |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server, Sqlite). + |Falling back to the regular single-row-per-batch insert behavior. 
+ |""".stripMargin + ) + } + + def apply[PrepareRow, Session]( + idiom: io.getquill.idiom.Idiom, + query: Unparticular.Query, + perRowLifts: List[SingleEntityLifts], + otherLifts: List[Planter[?, ?, ?]], + originalEntityLifts: List[InjectableEagerPlanter[_, _, _]], + liftingPlaceholder: Int => String, + emptyContainsToken: Token => Token, + batchingBehavior: BatchingBehavior, + extractBehavior: BatchExtractBehavior + ): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = + new Executor( + idiom, + query, + perRowLifts, + otherLifts, + originalEntityLifts, + liftingPlaceholder, + emptyContainsToken, + batchingBehavior, + extractBehavior + ).apply() + + class Executor[PrepareRow, Session]( + idiom: io.getquill.idiom.Idiom, + query: Unparticular.Query, + perRowLifts: List[SingleEntityLifts], + otherLifts: List[Planter[?, ?, ?]], + originalEntityLifts: List[InjectableEagerPlanter[_, _, _]], + liftingPlaceholder: Int => String, + emptyContainsToken: Token => Token, + batchingBehavior: BatchingBehavior, + extractBehavior: BatchExtractBehavior + ) { + def apply(): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = + batchingBehavior match + // If we have MultiRowsPerBatch behavior and we are instructed to concatenate multiple rows together (i.e. entitiesPerQuery > 1) + case BatchingBehavior.MultiRowsPerBatch(entitiesPerQuery) if (entitiesPerQuery > 1) => + val validations = + for { + _ <- validateConcatenatedIterationPossible(query, entitiesPerQuery) + _ <- validateIdiomSupportsConcatenatedIteration(idiom, extractBehavior) + } yield () + + validations match + case Left(msg) => + logger.underlying.warn(msg) + singleRowIteration() + case Right(_) => + concatenatedRowIteration(entitiesPerQuery) + + case _ => + singleRowIteration() + + // NOTE: First we can particularize for every query as explained below. 
+ // If needed, at some point we can optimize and have just two query particularizations: + // if (entitiesCount <= batchSize) + // batch(single)(entitiesSize%batchSize) + // else + // insert-batch(notLast)(batchSize) + insert-batch(last)(entitiesSize%batchSize) + // + // In general we get the VALUES-clause lifts for all rows we want to insert in this query + // E.g. in a batch-set that looks like (assuming batchSize=3 i.e. each VALUES-clause is a row): + // (note that the WHERE clause is not technically possible for insertions but can happen with UPDATEs that CAN have WHERE clauses) + // Query1: INSERT INTO Person (name, age) VALUES ('Joe', 22), ('Jack', 33), ('Jill', 44) WHERE something=liftedValue + // Query2: INSERT INTO Person (name, age) VALUES ('LastGuy', 55) WHERE something=liftedValue + // We will have the groups: + // (using [stuff, stuff] as list syntax), l:stuff for lift(stuff) a.k.a. Planter(stuff), sing: means singleQueryEntities + // Query1: sing:[ SingleEntityLifts([l:'Joe', l:22]), SingleEntityLifts([l:'Jack', l:33]), SingleEntityLifts([l:'Jill', l:44]) ], otherLifts:[liftedValue] + // Query2: sing:[ SingleEntityLifts([l:'LastGuy', l:55]) ], otherLifts:[liftedValue] + // + // Another possibility (This is actually the 1st clause in the code arrangement below) is that there are only 3 rows in total, + // in that case there will only be one query: + // Query1: INSERT INTO Person (name, age) VALUES ('Joe', 22), ('Jack', 33), ('Jill', 44) WHERE something=liftedValue + // We will have just one group: + // Query1: sing:[ SingleEntityLifts([l:'Joe', l:22]), SingleEntityLifts([l:'Jack', l:33]), SingleEntityLifts([l:'Jill', l:44]) ], otherLifts:[liftedValue] + def concatenatedRowIteration(numEntitiesPerQuery: Int): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = { + val totalEntityCount = perRowLifts.length + val templateOfLifts = originalEntityLifts ++ otherLifts + + // if (entitiesCount <= batchSize) + // 
batch(single)(entitiesSize%batchSize) + if (totalEntityCount <= numEntitiesPerQuery) { + val (singleGroupQuery, liftsOrderer) = Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, /*valueClauseRepeats*/ totalEntityCount) + + // Since the entire query will fit into one batch, we don't need to subdivide the batches + // just make prepares based on all of the lifts + val orderedLifts = liftsOrderer.orderLifts(perRowLifts, otherLifts) + val prepares = + (row: PrepareRow, session: Session) => { + LiftsExtractor.apply[PrepareRow, Session](orderedLifts, row, session) + } + + // Output here is a single Query and single prepares list (although the type is still a list) + List((singleGroupQuery, List(prepares))) + } + // else (entitiesCount > batchSize) + // insert-batch(notLast)(batchSize) + insert-batch(last)(entitiesSize % batchSize) + // In other words, if we are doing batchSize=1000 i.e. 1000 rows per query and we have 2200 rows, make three batch groups + // The 1st and 2nd that insert 1000 rows each, that's the queryForMostGroups + // The 3rd which only inserts 200 i.e. 2200 % batchSize + else { + val (anteriorQuery, anteriorLiftsOrderer) = Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, numEntitiesPerQuery) + val lastQueryEntityCount = totalEntityCount % numEntitiesPerQuery + val (lastQuery, lastLiftsOrderer) = Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, lastQueryEntityCount) + // println(s"Most Queries: ${numEntitiesPerQuery} Entities, Last Query: ${lastQueryEntityCount} Entities") + + // Say you have `liftQuery(A,B,C,D,E).foreach(...)` and numEntitiesPerQuery:=2 you need to do the following: + // (where by lift(As) I mean lift all the columns of A) + // Prepare(INSERT ... VALUES (?, ?)) run-on (lift(As), lift(Bs)), then run-on (lift(Cs), lift(Ds)) + // Then the one final query: + // Prepare(INSERT ... 
VALUES (?)) run-on (lift(Es)) + // Since perRowLifts.grouped(numEntitiesPerQuery).toList this case would be: + // List(List(A, B), List(C, D), List(E)) so we need to drop the last List(E) i.e. on the dropRight(1) + // + // If we have fewer entities to insert however (i.e. numEntitiesPerQuery > 2 e.g. just List(List(A))) then + // we just take that and do not drop anything. + val groupedLifts = perRowLifts.grouped(numEntitiesPerQuery).toList + val entitiesInQueries = + if (lastQueryEntityCount > 0) + groupedLifts.dropRight(1) + else + groupedLifts + + val anteriorPrepares = + entitiesInQueries.map { entitiesInOneQuery => + // So firstly we need to combine the `sing` and `otherLifts` groups (see comment above for more explanation) + val liftsInThisGroup = anteriorLiftsOrderer.orderLifts(entitiesInOneQuery, otherLifts) + (row: PrepareRow, session: Session) => + LiftsExtractor.apply[PrepareRow, Session](liftsInThisGroup, row, session) + } + val lastPrepare = { + val lastEntities = groupedLifts.last + val liftsInThisGroup = lastLiftsOrderer.orderLifts(lastEntities, otherLifts) + (row: PrepareRow, session: Session) => + LiftsExtractor.apply[PrepareRow, Session](liftsInThisGroup, row, session) + } + List( + (anteriorQuery, anteriorPrepares) + ) ++ ( + if (lastQueryEntityCount > 0) + List((lastQuery, List(lastPrepare))) + else + Nil + ) + } + } + + def singleRowIteration(): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = { + val numEntitiesInAllQueries = 1 + // Since every batch consists of one row inserted, can use the original InjectableEagerPlanter here to Particularize (i.e. insert the right number of '?' 
into) the query + val liftsInAllGroups = originalEntityLifts ++ otherLifts + val (allGroupsQuery, liftsOrderer) = Particularize.Dynamic(query, liftsInAllGroups, liftingPlaceholder, emptyContainsToken, numEntitiesInAllQueries) + val prepares = + perRowLifts.map { + liftsInThisGroup => + val orderedLifts = liftsOrderer.orderLifts(List(liftsInThisGroup), otherLifts) + { + (row: PrepareRow, session: Session) => + LiftsExtractor.apply[PrepareRow, Session](orderedLifts, row, session) + } + } + List((allGroupsQuery, prepares)) + } + + } +} diff --git a/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala b/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala index 1053df33b..4cac6b53d 100644 --- a/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala +++ b/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala @@ -69,6 +69,7 @@ object Unparticular: case SetContainsToken(a, op, b) => apply(stmt"$a $op ($b)" +: tail, sqlResult, liftingResult, liftingSize) case ScalarTagToken(tag) => apply(tail, liftingPlaceholder(liftingSize) +: sqlResult, tag +: liftingResult, liftingSize + 1) case Statement(tokens) => apply(tokens.foldRight(tail)(_ +: _), sqlResult, liftingResult, liftingSize) + case ValuesClauseToken(stmt) => apply(stmt +: tail, sqlResult, liftingResult, liftingSize) case _: ScalarLiftToken => throw new UnsupportedOperationException("Scalar Lift Tokens are not used in Dotty Quill. 
Only Scalar Lift Tokens.") case _: QuotationTagToken => diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala b/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala index 1a994730e..03c6eafb2 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala @@ -281,6 +281,36 @@ object QuotedExpr { sealed trait QuotationLotExpr object QuotationLotExpr { + def apply(expr: Expr[Any])(using Quotes): QuotationLotExpr = + unapply(expr).getOrElse { quotes.reflect.report.throwError(s"The expression: ${expr.show} is not a valid Quoted Expression and cannot be unquoted.") } + + // Verify that a quotation is inline. It is inline if all the lifts are inline. There is no need + // to search the AST since it has been parsed already + def unapply(expr: Expr[Any])(using Quotes): Option[QuotationLotExpr] = { + import quotes.reflect._ + expr match { + case vase @ `QuotationLot.apply`(quotation, uid, rest) => + quotation match + case quoted @ QuotedExpr.UprootableWithLifts(QuotedExpr(ast, _, _), lifts) => + // Note: If the `Quoted.apply` is inside an Inline, would we need to do the same thing that we do + // to the lifts (i.e. nesting the Inline inside them) to the 'rest' element? I don't think so + // because the Inline would be around `Quoted.apply` which is already inside of `QuotationLot.apply` + // i.e. 
it would be QuotationLot.apply(Inline(Quoted.apply(...)), ..., rest) so I don't see how 'rest' + // could get the contents of this Inner inline + Some(Uprootable(uid, ast, lifts)(quoted, vase.asInstanceOf[Expr[QuotationLot[Any]]], rest)) + + case _ => + Some(Pluckable(uid, quotation, rest)) + + // If it's a QuotationLot but we can't extract it at all, need to throw an error + case '{ ($qb: QuotationLot[t]) } => + Some(Pointable(qb)) + + case _ => + None + } + } + protected object `(QuotationLot).unquote` { def unapply(expr: Expr[Any])(using Quotes) = { import quotes.reflect._ @@ -308,7 +338,26 @@ object QuotationLotExpr { def unapply(expr: Expr[Any])(using Quotes): Option[(Expr[Quoted[Any]], String, List[Expr[_]])] = { import quotes.reflect._ - UntypeExpr(expr) match { + /* + * Specifically the inner `Uninline` part allows using metas e.g. InsertMeta that + * are defined in parent contexts e.g. + * class Stuff { object InnertStuff { inline given InsertMeta[Product] = insertMeta(_.id) } } + * and then imported into other places e.g. + * class OtherStuff extends Stuff { import InnerStuff.{given, _} } because multiple `Inline` blocks + * will be nested around the InsertMeta.apply part. + * It looks something like this: + * { + * // If you have a look at the Term-level, this outer layer is actually one or multiple Inlined(...) parts + * val BatchValuesSpec_this: BatchValuesJdbcSpec.this = Ex 1 - Batch Insert Normal$_this.1_ + * ((InsertMeta[Product](Quoted[Product](Tuple(List[Ast](Property.Opinionated(Ident.Opinionated("_$V", Quat.Product("id", "description", "sku")), "id", ...))).asInstanceOf[Ast], Nil, Nil), + * "2e594955-b45c-4532-9bd5-ec3b3eb04138"): InsertMeta[Product]): InsertMeta[Product]) + * } + * This will cause all manner of failure for example: + * "The InsertMeta form is invalid. It is Pointable." Also note that this is safe to do + * so long as we are not extracting any lifts from the Quoted.apply sections inside. 
+ * Otherwise, we may run into unbound-variable issues when the lifts inside the Quoted.apply are extracted. + */ + UntypeExpr(Uninline(expr)) match { // Extract the entity, the uid and any other expressions the qutation bin may have // (e.g. the extractor if the QuotationLot is a QueryMeta). That `Uninline` // is needed because in some cases, the `underlyingArgument` call (that gets called somewhere before here) @@ -378,36 +427,6 @@ object QuotationLotExpr { } } - def apply(expr: Expr[Any])(using Quotes): QuotationLotExpr = - unapply(expr).getOrElse { quotes.reflect.report.throwError(s"The expression: ${expr.show} is not a valid Quoted Expression and cannot be unquoted.") } - - // Verify that a quotation is inline. It is inline if all the lifts are inline. There is no need - // to search the AST since it has been parsed already - def unapply(expr: Expr[Any])(using Quotes): Option[QuotationLotExpr] = { - import quotes.reflect._ - expr match { - case vase @ `QuotationLot.apply`(quotation, uid, rest) => - quotation match - case quoted @ QuotedExpr.UprootableWithLifts(QuotedExpr(ast, _, _), lifts) => - // Note: If the `Quoted.apply` is inside an Inline, would we need to do the same thing that we do - // to the lifts (i.e. nesting the Inline inside them) to the 'rest' element? I don't think so - // because the Inline would be around `Quoted.apply` which is already inside of `QuotationLot.apply` - // i.e. it would be QuotationLot.apply(Inline(Quoted.apply(...)), ..., rest) so I don't see how 'rest' - // could get the contents of this Inner inline - Some(Uprootable(uid, ast, lifts)(quoted, vase.asInstanceOf[Expr[QuotationLot[Any]]], rest)) - - case _ => - Some(Pluckable(uid, quotation, rest)) - - // If it's a QuotationLot but we can't extract it at all, need to throw an error - case '{ ($qb: QuotationLot[t]) } => - Some(Pointable(qb)) - - case _ => - None - } - } - // Not sure why this is needed by incremental compile breaks without it e.g. 
gives: // case class Pointable(expr: Expr[QuotationLot[Any]]) extends QuotationLotExpr import scala.quoted.Expr diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala b/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala index 098211fa7..cebdef2e7 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala @@ -3,6 +3,7 @@ package io.getquill.metaprog import scala.quoted._ import scala.quoted.Varargs import io.getquill.util.Format +import io.getquill.util.Messages.TraceType class Is[T: Type]: def unapply(expr: Expr[Any])(using Quotes) = @@ -556,10 +557,8 @@ object Extractors { any match // case i @ Inlined(_, pv, v) => - // TODO File a bug for this? Try exprMap to fill in the variables - // println Format(Printer.TreeStructure.show(i.underlyingArgument)) - report.warning(s"Ran into an inline on a clause: ${Format(Printer.TreeStructure.show(i.underlyingArgument))}. Proxy variables will be discarded: ${pv}") - // report.warning(s"Ran into an inline on a clause: ${Format.Term(i)}. Proxy variables will be discarded: ${pv}") + if (SummonTranspileConfig.summonTraceTypes(true).contains(TraceType.Meta)) + report.warning(s"Ran into an inline on a clause: ${Format(Printer.TreeStructure.show(i.underlyingArgument))}. 
Proxy variables will be discarded: ${pv}") v.underlyingArgument case _ => any } diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala b/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala index b38517ebb..8e728d2af 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala @@ -27,23 +27,31 @@ object SummonTranspileConfig: // report.info(conf.toString) conf - def summonTraceTypes()(using Quotes): List[TraceType] = + def summonTraceTypes(orFromProperties: Boolean = false)(using Quotes): List[TraceType] = import quotes.reflect._ - val enableTraceExpr = Expr.summon[EnableTrace].getOrElse('{ EnableTraceNone }) - val foundTraceTypeNames = findHListMembers(enableTraceExpr, "Trace").map(_.typeSymbol.name) - TraceType.values.filter { trace => - val simpleName = parseSealedTraitClassName(trace.getClass) - foundTraceTypeNames.contains(simpleName) - } + Expr.summon[EnableTrace] match + case Some(enableTraceExpr) => + val foundTraceTypeNames = findHListMembers(enableTraceExpr, "Trace").map(_.typeSymbol.name) + TraceType.values.filter { trace => + val simpleName = parseSealedTraitClassName(trace.getClass) + foundTraceTypeNames.contains(simpleName) + } + case None => + if (orFromProperties) + io.getquill.util.GetTraces() + else + List() def summonPhaseDisables()(using Quotes): List[OptionalPhase] = import quotes.reflect._ - val disablePhaseExpr = Expr.summon[DisablePhase].getOrElse('{ DisablePhaseNone }) - val disablePhaseTypeNames = findHListMembers(disablePhaseExpr, "Phase").map(_.typeSymbol.name) - OptionalPhase.all.filter { phase => - val simpleName = parseSealedTraitClassName(phase.getClass) - disablePhaseTypeNames.contains(simpleName) - } + Expr.summon[DisablePhase] match + case Some(disablePhaseExpr) => + val disablePhaseTypeNames = findHListMembers(disablePhaseExpr, "Phase").map(_.typeSymbol.name) + 
OptionalPhase.all.filter { phase => + val simpleName = parseSealedTraitClassName(phase.getClass) + disablePhaseTypeNames.contains(simpleName) + } + case None => List() def findHListMembers(baseExpr: Expr[_], typeMemberName: String)(using Quotes): List[quotes.reflect.TypeRepr] = import quotes.reflect._ diff --git a/quill-sql/src/main/scala/io/getquill/util/GetTraces.scala b/quill-sql/src/main/scala/io/getquill/util/GetTraces.scala new file mode 100644 index 000000000..142222291 --- /dev/null +++ b/quill-sql/src/main/scala/io/getquill/util/GetTraces.scala @@ -0,0 +1,6 @@ +package io.getquill.util + +// Proxy because Messages.traces is package-specific +// TODO Need to change ownership there to getQuill +object GetTraces: + def apply() = io.getquill.util.Messages.traces diff --git a/quill-sql/src/test/scala/io/getquill/BatchActionMultiTest.scala b/quill-sql/src/test/scala/io/getquill/BatchActionMultiTest.scala new file mode 100644 index 000000000..5420e8496 --- /dev/null +++ b/quill-sql/src/test/scala/io/getquill/BatchActionMultiTest.scala @@ -0,0 +1,271 @@ +package io.getquill + +import scala.language.implicitConversions +import io.getquill.Quoted +import io.getquill.ast._ +import io.getquill.QuotationLot +import io.getquill.QuotationVase +import io.getquill.context.ExecutionType +import org.scalatest._ +import io.getquill.quat.quatOf +import io.getquill.context.ExecutionType.Static +import io.getquill.context.ExecutionType.Dynamic +import io.getquill.context.Context +import io.getquill.quote +import io.getquill.query +import io.getquill.util.debug.PrintMac + +class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDialect, Literal] { + // Need to fully type this otherwise scala compiler thinks it's still just 'Context' from the super-class + // and the extensions (m: MirrorContext[_, _]#BatchActionMirror) etc... 
classes in Spec don't match their types correctly + val ctx: MirrorContext[PostgresDialect, Literal] = new MirrorContext[PostgresDialect, Literal](PostgresDialect, Literal) + import ctx._ + + "Multi-row Batch Action Should work with" - { + "inserts > batch-size - (2rows + 2rows) + (1row)" - { + val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + def expect(executionType: ExecutionType) = + List( + ( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?), (?, ?, ?)", + List(List(1, "A", 111, 2, "B", 222), List(3, "C", 333, 4, "D", 444)), + executionType + ), + ( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", + List(List(5, "E", 555)), + executionType + ) + ) + + "static" in { + val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2) + static.tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + "dynamic" in { + val dynamic = ctx.run(liftQuery(people).foreach(p => insertPeopleDynamic(p)), 2) + dynamic.tripleBatchMulti mustEqual expect(ExecutionType.Dynamic) + } + + def expect2(executionType: ExecutionType) = + List( + ( + "INSERT INTO Person (id,name,age) VALUES (?, ((? || ?) || 'bar'), ?), (?, ((? || ?) || 'bar'), ?)", + List(List(1, "foo", "A", 111, 2, "foo", "B", 222), List(3, "foo", "C", 333, 4, "foo", "D", 444)), + executionType + ), + ( + "INSERT INTO Person (id,name,age) VALUES (?, ((? || ?) || 'bar'), ?)", + List(List(5, "foo", "E", 555)), + executionType + ) + ) + "static - mixed" in { + val static = ctx.run(liftQuery(people).foreach(p => query[Person].insert(_.id -> p.id, _.name -> (lift("foo") + p.name + "bar"), _.age -> p.age)), 2) + static.tripleBatchMulti mustEqual expect2(ExecutionType.Static) + } + "dynamic - mixed" in { + // TODO Why does it not print that a dynamic query is being run? 
+ val q = quote(liftQuery(people).foreach(p => query[Person].insert(_.id -> p.id, _.name -> (lift("foo") + p.name + "bar"), _.age -> p.age))) + val static = ctx.run(q, 2) + static.tripleBatchMulti mustEqual expect2(ExecutionType.Dynamic) + } + } + + "batch insert - (2rows + 2rows)" - { + val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444)) + def expect(executionType: ExecutionType) = + List( + ( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?), (?, ?, ?)", + List(List(1, "A", 111, 2, "B", 222), List(3, "C", 333, 4, "D", 444)), + executionType + ) + ) + + "static" in { + val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2) + static.tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + + "dynamic" in { + val dynamic = ctx.run(liftQuery(people).foreach(p => insertPeopleDynamic(p)), 2) + dynamic.tripleBatchMulti mustEqual expect(ExecutionType.Dynamic) + } + } + + "inserts == batch-size" - { + val people = List(Person(1, "A", 111), Person(2, "B", 222)) + def expect(executionType: ExecutionType) = + List( + ( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?), (?, ?, ?)", + List(List(1, "A", 111, 2, "B", 222)), + executionType + ) + ) + + "static" in { + val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2) + static.tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + + "dynamic" in { + val dynamic = ctx.run(liftQuery(people).foreach(p => insertPeopleDynamic(p)), 2) + dynamic.tripleBatchMulti mustEqual expect(ExecutionType.Dynamic) + } + } + + "inserts < batch-size - (1row)" - { + val people = List(Person(1, "A", 111)) + def expect(executionType: ExecutionType) = + List( + ( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", + List(List(1, "A", 111)), + executionType + ) + ) + + "static" in { + val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2) + static.tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + + "dynamic" in { + val 
dynamic = ctx.run(liftQuery(people).foreach(p => insertPeopleDynamic(p)), 2) + dynamic.tripleBatchMulti mustEqual expect(ExecutionType.Dynamic) + } + } + + "fallback for non-insert query" - { + val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + def expect(executionType: ExecutionType) = + List( + ( + "UPDATE Person AS pt SET id = ?, name = ?, age = ? WHERE pt.id = ?", + List(List(1, "A", 111, 1), List(2, "B", 222, 2), List(3, "C", 333, 3), List(4, "D", 444, 4), List(5, "E", 555, 5)), + executionType + ) + ) + + "static" in { + val static = ctx.run(liftQuery(people).foreach(p => updatePeopleById(p)), 2) + static.tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + + "dynamic" in { + val dynamic = ctx.run(liftQuery(people).foreach(p => updatePeopleByIdDynamic(p)), 2) + dynamic.tripleBatchMulti mustEqual expect(ExecutionType.Dynamic) + } + } + + "supported contexts" - { + val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + def makeRow(executionType: ExecutionType)(queryA: String, queryB: String) = + List( + ( + queryA, + List(List(1, "A", 111, 2, "B", 222), List(3, "C", 333, 4, "D", 444)), + executionType + ), + ( + queryB, + List(List(5, "E", 555)), + executionType + ) + ) + + def expect(executionType: ExecutionType) = + makeRow(executionType)( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?), (?, ?, ?)", + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?)" + ) + + def expectH2(executionType: ExecutionType) = + makeRow(executionType)( + "INSERT INTO Person (id,name,age) VALUES ($1, $2, $3), ($4, $5, $6)", + "INSERT INTO Person (id,name,age) VALUES ($1, $2, $3)" + ) + + def expectPostgresReturning(executionType: ExecutionType) = + makeRow(executionType)( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?), (?, ?, ?) RETURNING id", + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?) 
RETURNING id" + ) + + def expectSqlServerReturning(executionType: ExecutionType) = + makeRow(executionType)( + "INSERT INTO Person (id,name,age) OUTPUT INSERTED.id VALUES (?, ?, ?), (?, ?, ?)", + "INSERT INTO Person (id,name,age) OUTPUT INSERTED.id VALUES (?, ?, ?)" + ) + + "postgres - regular/returning" in { + val ctx: MirrorContext[PostgresDialect, Literal] = new MirrorContext(PostgresDialect, Literal) + import ctx._ + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expectPostgresReturning(ExecutionType.Static) + } + "sqlserver - regular/returning" in { + val ctx: MirrorContext[SQLServerDialect, Literal] = new MirrorContext(SQLServerDialect, Literal) + import ctx._ + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expectSqlServerReturning(ExecutionType.Static) + } + "mysql - regular/returning" in { + val ctx: MirrorContext[MySQLDialect, Literal] = new MirrorContext(MySQLDialect, Literal) + import ctx._ + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + "h2 - regular/returning" in { + val ctx: MirrorContext[H2Dialect, Literal] = new MirrorContext(H2Dialect, Literal) + import ctx._ + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expectH2(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expectH2(ExecutionType.Static) + } + "sqlite - only regular" in { + val ctx: MirrorContext[SqliteDialect, Literal] = new 
MirrorContext(SqliteDialect, Literal) + import ctx._ + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + } + + "fallback for non-supported context" - { + val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + def expect(executionType: ExecutionType) = + List( + ( + "INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", + List(List(1, "A", 111), List(2, "B", 222), List(3, "C", 333), List(4, "D", 444), List(5, "E", 555)), + executionType + ) + ) + + "oracle" - { + val ctx: MirrorContext[OracleDialect, Literal] = new MirrorContext[OracleDialect, Literal](OracleDialect, Literal) + import ctx._ + "static" in { + val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2) + static.tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + "dynamic" in { + val dynamic = ctx.run(liftQuery(people).foreach(p => insertPeopleDynamic(p)), 2) + dynamic.tripleBatchMulti mustEqual expect(ExecutionType.Dynamic) + } + } + "oracle - with returning clause" - { + val ctx: MirrorContext[OracleDialect, Literal] = new MirrorContext[OracleDialect, Literal](OracleDialect, Literal) + import ctx._ + "static" in { + val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2) + static.tripleBatchMulti mustEqual expect(ExecutionType.Static) + } + "dynamic" in { + val dynamic = ctx.run(liftQuery(people).foreach(p => insertPeopleDynamic(p).returning(_.id)), 2) + dynamic.tripleBatchMulti mustEqual expect(ExecutionType.Dynamic) + } + } + } + } +} diff --git a/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala b/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala index bd67af00f..8320d93b3 100644 --- a/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala +++ b/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala @@ -21,7 +21,7 @@ trait SuperContext[D <: io.getquill.idiom.Idiom, N <:
NamingStrategy] { // Also note that the context needs to be typed. As an example of how to do that, we passed typing parameters // through the class. If the parameters are removed (i.e. used `val ctx: Context[_, _]`), the LoadModule will try to // load the base-object `Idiom` because that is the minimal thing that the Dialect parameter needs - // (and it seems LoadModule in BatchQueryExecution does not yet know what the values of the _, _ in Context[_, _] + // (and it seems LoadModule in QueryExecutionBatch does not yet know what the values of the _, _ in Context[_, _] // are supposed to be) val ctx: Context[D, N] // import ctx._ @@ -29,6 +29,9 @@ trait SuperContext[D <: io.getquill.idiom.Idiom, N <: NamingStrategy] { case class Person(id: Int, name: String, age: Int) inline def insertPeople = quote((p: Person) => query[Person].insertValue(p)) val insertPeopleDynamic = quote((p: Person) => query[Person].insertValue(p)) + + inline def updatePeopleById = quote((p: Person) => query[Person].filter(pt => pt.id == p.id).updateValue(p)) + val updatePeopleByIdDynamic = quote((p: Person) => query[Person].filter(pt => pt.id == p.id).updateValue(p)) } class BatchActionTest extends Spec with Inside with SuperContext[PostgresDialect, Literal] { diff --git a/quill-sql/src/test/scala/io/getquill/Spec.scala b/quill-sql/src/test/scala/io/getquill/Spec.scala index cf4c40750..f4283f911 100644 --- a/quill-sql/src/test/scala/io/getquill/Spec.scala +++ b/quill-sql/src/test/scala/io/getquill/Spec.scala @@ -36,6 +36,22 @@ abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { m.info.executionType ) + extension (m: MirrorContextBase[_, _]#BatchActionReturningMirror[_]) + def tripleBatchMulti = + m.groups.map { (queryString, returnAction, prepares) => + ( + queryString, + prepares.map { prep => + // being explicit here about the fact that this is done per prepare element i.e. 
all of them are supposed to be Row instances + prep match { + case r: io.getquill.context.mirror.Row => + r.data.map(data => deIndexify(data._2)) + } + }, + m.info.executionType + ) + } + private def deIndexify(value: Any): Any = value match case Some((Row.TupleIndex(a) -> b)) => Some(deIndexify(b)) @@ -59,6 +75,22 @@ abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { m.info.executionType ) + extension (m: MirrorContextBase[_, _]#BatchActionMirror) + def tripleBatchMulti = + m.groups.map { (queryString, prepares) => + ( + queryString, + prepares.map { prep => + // being explicit here about the fact that this is done per prepare element i.e. all of them are supposed to be Row instances + prep match { + case r: io.getquill.context.mirror.Row => + r.data.map(data => deIndexify(data._2)) + } + }, + m.info.executionType + ) + } + extension (m: MirrorContextBase[_, _]#ActionMirror) def triple = ( diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/BatchValuesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/BatchValuesSpec.scala new file mode 100644 index 000000000..c7c25c0b9 --- /dev/null +++ b/quill-sql/src/test/scala/io/getquill/context/sql/BatchValuesSpec.scala @@ -0,0 +1,60 @@ +package io.getquill.context.sql + +import io.getquill._ +import org.scalatest.BeforeAndAfterEach + +trait BatchValuesSpec extends Spec with BeforeAndAfterEach { + + val context: SqlContext[_, _] + import context._ + + case class Product(id: Int, description: String, sku: Long) + + inline def insertProduct = + quote((p: Product) => query[Product].insertValue(p)) + + def makeProducts(maxRows: Int) = + (1 to maxRows).map(i => Product(i, s"Product-${i}", i * 100)) + + object `Ex 1 - Batch Insert Normal` { + inline given InsertMeta[Product] = insertMeta(_.id) + val products = makeProducts(22) + val batchSize = 5 + inline def opExt = quote { + (transform: Insert[Product] => Insert[Product]) => + liftQuery(products).foreach(p => 
transform(query[Product].insertValue(p))) + } + inline def op = quote { + liftQuery(products).foreach(p => query[Product].insertValue(p)) + } + inline def get = quote { query[Product] } + def result = products + } + + object `Ex 2 - Batch Insert Returning` { + val productsOriginal = makeProducts(20) + // want to populate them from DB + val products = productsOriginal.map(p => p.copy(id = 0)) + val expectedIds = productsOriginal.map(_.id) + val batchSize = 10 + inline def op = quote { + liftQuery(products).foreach(p => query[Product].insertValue(p).returningGenerated(p => p.id)) + } + inline def get = quote { query[Product] } + def result = productsOriginal + } + + object `Ex 3 - Batch Insert Mixed` { + val products = makeProducts(20) + val batchSize = 40 + inline def op = quote { + liftQuery(products).foreach(p => query[Product].insert(_.id -> p.id, _.description -> lift("BlahBlah"), _.sku -> p.sku)) + } + inline def opExt = quote { + (transform: Insert[Product] => Insert[Product]) => + liftQuery(products).foreach(p => transform(query[Product].insert(_.id -> p.id, _.description -> lift("BlahBlah"), _.sku -> p.sku))) + } + inline def get = quote { query[Product] } + def result = products.map(_.copy(description = "BlahBlah")) + } +} diff --git a/scripts/increase_postgres_latency.sh b/scripts/increase_postgres_latency.sh new file mode 100755 index 000000000..95c50a19f --- /dev/null +++ b/scripts/increase_postgres_latency.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Add 50ms of one-way netem delay to the Postgres container (~100ms extra round-trip per request). +# This gives a more realistic understanding of batch-query performance +# since in most corporate environments DBs are on a separate server +# potentially multiple network-hops away. +docker exec protoquill_postgres_1 tc qdisc add dev eth0 root netem delay 50ms \ No newline at end of file diff --git a/start_containers.sh b/scripts/start_containers.sh similarity index 100% rename from start_containers.sh rename to scripts/start_containers.sh