
Remove matrowl and do token splicing at runtime #168

Merged
1 commit merged on Jul 31, 2022
2 changes: 1 addition & 1 deletion quill-caliban/src/test/scala/io/getquill/CalibanSpec.scala
@@ -18,7 +18,7 @@ trait CalibanSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll {
// FlatSchema and NestedSchema share the same DB data so only need to create it using one of them
override def beforeAll() = {
import FlatSchema._
(for {
(for { //
_ <- Ctx.run(sql"TRUNCATE TABLE AddressT, PersonT RESTART IDENTITY".as[Delete[PersonT]])
_ <- Ctx.run(liftQuery(ExampleData.people).foreach(row => query[PersonT].insertValue(row)))
_ <- Ctx.run(liftQuery(ExampleData.addresses).foreach(row => query[AddressT].insertValue(row)))
@@ -59,6 +59,7 @@ import io.getquill.metaprog.EagerPlanterExpr
import io.getquill.metaprog.SummonTranspileConfig
import io.getquill.norm.TranspileConfig
import io.getquill.metaprog.TranspileConfigLiftable
import io.getquill.idiom.Token

private[getquill] enum BatchActionType:
case Insert
@@ -470,7 +471,10 @@ object BatchQueryExecution:
}

val allPlanterExprs = (filteredPerRowLifts ++ secondaryLifts).map(_.plant)
val particularQuery = Particularize.Static(state.query, allPlanterExprs, '{ $batchContextOperation.idiom.liftingPlaceholder }, state.idiom.emptySetContainsToken)

val emptyContainsTokenExpr: Expr[Token => Token] = '{ $batchContextOperation.idiom.emptySetContainsToken(_) }
val liftingPlaceholderExpr: Expr[Int => String] = '{ $batchContextOperation.idiom.liftingPlaceholder }
val particularQuery = Particularize.Static[PrepareRow](state.query, allPlanterExprs, liftingPlaceholderExpr, emptyContainsTokenExpr)

'{
$batchContextOperation.execute(ContextOperation.Argument($particularQuery, $prepares.toArray, $extractor, ExecutionInfo(ExecutionType.Static, ${ Lifter(state.ast) }, ${ Lifter.quat(topLevelQuat) }), None))
@@ -381,7 +381,7 @@ object InsertUpdateMacro {
case astIdent: AIdent => deduceAssignmentsFromIdent(astIdent)

// Insertion could have lifts and quotes inside, need to extract those.
// E.g. it can be 'query[Person].insertValue(lift(Person("Joe",123)))' which becomes Quoted(CaseClass(name -> lift(x), age -> lift(y)), List(ScalarLift("Joe", x), ScalarLift(123, y)), Nil).
// E.g. it can be 'query[Person].insertValue(lift(Person("Joe",123)))' which becomes Quoted(CaseClass(name -> lift(x), age -> lift(y)), List(EagerLift("Joe", x), EagerLift(123, y)), Nil).
// (In some cases, maybe even the runtimeQuotes position could contain things)
// However, the insertee itself must always be available statically (i.e. it must be a Uprootable Quotation)
val (lifts, pluckedUnquotes) = ExtractLifts(inserteeRaw)
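
As a rough illustration of the shape described in the comment above, here is a simplified sketch; Ast, CaseClass, LiftTag, EagerLift, and Quoted below are illustrative stand-ins, not Quill's actual classes:

// Hypothetical, simplified model of the decomposition (not Quill's real types):
sealed trait Ast
case class CaseClass(fields: List[(String, Ast)]) extends Ast
case class LiftTag(uid: String) extends Ast                // stands in for the lift(x) markers in the AST
case class EagerLift(value: Any, uid: String)              // the extracted planter for each lift
case class Quoted(ast: Ast, lifts: List[EagerLift], runtimeQuotes: List[Any])

// query[Person].insertValue(lift(Person("Joe", 123))) then roughly corresponds to:
val insertee = Quoted(
  CaseClass(List("name" -> LiftTag("x"), "age" -> LiftTag("y"))),
  List(EagerLift("Joe", "x"), EagerLift(123, "y")),
  Nil
)
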
242 changes: 38 additions & 204 deletions quill-sql/src/main/scala/io/getquill/context/Particularize.scala
@@ -18,6 +18,7 @@ import io.getquill.idiom._
import scala.quoted._
import io.getquill.util.Format
import io.getquill.metaprog.InjectableEagerPlanterExpr
import io.getquill.parser.Lifter

/**
* For a query that has a filter(p => liftQuery(List("Joe","Jack")).contains(p.name)) we need to turn
@@ -29,214 +30,47 @@ import io.getquill.metaprog.InjectableEagerPlanterExpr
* which has to be manipulated inside of a '{ ... } block.
*/
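
A standalone sketch of the idea in that comment, assuming a JDBC-style context where each lift becomes a '?'; expandInClause and the "1 = 0" fallback are illustrative stand-ins rather than Quill's API:

// Since the length of a lifted list is only known at runtime, the IN-clause placeholders
// have to be generated from the actual values; an empty list falls back to an always-false
// predicate (the role emptySetContainsToken plays).
def expandInClause(
  prefix: String,
  column: String,
  values: List[Any],
  liftingPlaceholder: Int => String,
  startIndex: Int
): String =
  if values.isEmpty then s"$prefix WHERE 1 = 0"
  else
    val placeholders = values.indices.map(i => liftingPlaceholder(startIndex + i)).mkString(", ")
    s"$prefix WHERE $column IN ($placeholders)"

expandInClause("SELECT p.name FROM Person p", "p.name", List("Joe", "Jack"), _ => "?", 0)
// "SELECT p.name FROM Person p WHERE p.name IN (?, ?)"
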
object Particularize:
// ====================================== TODO additional-lifts case here too ======================================
// ====================================== TODO additional-lifts case here too ======================================
// ====================================== TODO additional-lifts case here too ======================================
// ====================================== TODO additional-lifts case here too ======================================
// ====================================== TODO additional-lifts case here too ======================================
private[getquill] object UnparticularQueryLiftable:
def apply(token: Unparticular.Query)(using Quotes) = liftableUnparticularQuery(token)
extension [T](t: T)(using ToExpr[T], Quotes) def expr: Expr[T] = Expr(t)
import io.getquill.parser.BasicLiftable

given liftableUnparticularQuery: BasicLiftable[Unparticular.Query] with
def lift =
case Unparticular.Query(basicQuery: String, realQuery: Statement) =>
'{ Unparticular.Query(${ basicQuery.expr }, ${ StatementLiftable(realQuery) }) }
end UnparticularQueryLiftable

private[getquill] object StatementLiftable:
def apply(token: Statement)(using Quotes) = liftableStatement(token)
extension [T](t: T)(using ToExpr[T], Quotes) def expr: Expr[T] = Expr(t)
import io.getquill.parser.BasicLiftable

given liftableToken: BasicLiftable[Token] with
def lift =
// Note: strange errors about SerializeHelper.fromSerialized types can happen here if NotSerializing is not used.
// Anyway we do not want tag-serialization here for the sake of simplicity for the tokenization which happens at runtime.
// AST serialization is generally used to make unlifting deeply nested ASTs simpler but Quotation/Scalar Tags are only 1-level deep.
case ScalarTagToken(lift: ScalarTag) => '{ io.getquill.idiom.ScalarTagToken(${ Lifter.NotSerializing.scalarTag(lift) }) }
case QuotationTagToken(lift: QuotationTag) => '{ io.getquill.idiom.QuotationTagToken(${ Lifter.NotSerializing.quotationTag(lift) }) }
case StringToken(string) => '{ io.getquill.idiom.StringToken(${ string.expr }) }
case s: Statement => liftableStatement(s)
case SetContainsToken(a, op, b) => '{ io.getquill.idiom.SetContainsToken(${ a.expr }, ${ op.expr }, ${ b.expr }) }
case ScalarLiftToken(lift) => quotes.reflect.report.throwError("Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Tag Tokens.")

given liftableStatement: BasicLiftable[Statement] with
def lift =
case Statement(tokens) => '{ io.getquill.idiom.Statement(${ tokens.expr }) }
end StatementLiftable

// the following should test for that: update - extra lift + scalars + liftQuery/setContains
object Static:
/** Convenience constructor for doing particularization from an Unparticular.Query */
def apply[PrepareRowTemp](query: Unparticular.Query, lifts: List[Expr[Planter[_, _, _]]], runtimeLiftingPlaceholder: Expr[Int => String], emptySetContainsToken: Token => Token)(using Quotes): Expr[String] =
raw(query.realQuery, lifts, runtimeLiftingPlaceholder, emptySetContainsToken)

private[getquill] def raw[PrepareRowTemp, Session](statement: Statement, lifts: List[Expr[Planter[_, _, _]]], runtimeLiftingPlaceholder: Expr[Int => String], emptySetContainsToken: Token => Token)(using Quotes): Expr[String] = {
def apply[PrepareRowTemp: Type](query: Unparticular.Query, lifts: List[Expr[Planter[_, _, _]]], runtimeLiftingPlaceholder: Expr[Int => String], emptySetContainsToken: Expr[Token => Token])(using Quotes): Expr[String] =
import quotes.reflect._

enum LiftChoice:
case ListLift(value: EagerListPlanterExpr[Any, PrepareRowTemp, Session])
case SingleLift(value: PlanterExpr[Any, PrepareRowTemp, Session])

val listLifts: Map[String, EagerListPlanterExpr[Any, PrepareRowTemp, Session]] =
lifts.collect {
case PlanterExpr.Uprootable(planterExpr: EagerListPlanterExpr[_, _, _]) =>
planterExpr.asInstanceOf[EagerListPlanterExpr[Any, PrepareRowTemp, Session]]
}.map(lift => (lift.uid, lift)).toMap

val singleLifts: Map[String, EagerPlanterExpr[Any, PrepareRowTemp, Session]] =
lifts.collect {
case PlanterExpr.Uprootable(planterExpr: EagerPlanterExpr[_, _, _]) =>
planterExpr.asInstanceOf[EagerPlanterExpr[Any, PrepareRowTemp, Session]]
}.map(lift => (lift.uid, lift)).toMap

val injectableLifts: Map[String, InjectableEagerPlanterExpr[Any, PrepareRowTemp, Session]] =
lifts.collect {
case PlanterExpr.Uprootable(planterExpr: InjectableEagerPlanterExpr[_, _, _]) =>
planterExpr.asInstanceOf[InjectableEagerPlanterExpr[Any, PrepareRowTemp, Session]]
}.map(lift => (lift.uid, lift)).toMap

def getLifts(uid: String): LiftChoice =
listLifts.get(uid).map(LiftChoice.ListLift(_))
.orElse(singleLifts.get(uid).map(LiftChoice.SingleLift(_)))
.orElse(injectableLifts.get(uid).map(LiftChoice.SingleLift(_)))
.getOrElse {
throw new IllegalArgumentException(s"Cannot find list-lift with UID ${uid} (from all the lifts ${lifts.map(io.getquill.util.Format.Expr(_))})")
}

/**
* Actually go from a liftQuery(List("Joe", "Jack")) to "?, ?" using the lifting placeholder.
* Also return how much the index should be incremented
*/
def placeholders(uid: String, initialIndex: Expr[Int]): (Expr[Int], Expr[String], LiftChoice) =
val liftType = getLifts(uid)
liftType match
case LiftChoice.ListLift(lifts) =>
// using index 1 since SQL prepares start with $1 typically
val liftsPlaceholder = '{ ${ lifts.expr }.zipWithIndex.map((_, index) => $runtimeLiftingPlaceholder($initialIndex + index)).mkString(", ") }
val liftsLength = '{ ${ lifts.expr }.length }
(liftsLength, liftsPlaceholder, liftType)
case LiftChoice.SingleLift(lift) =>
(Expr(1), '{ $runtimeLiftingPlaceholder($initialIndex) }, liftType)
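
As a plain runtime analogue of the placeholders helper above, assuming a numbered placeholder style such as $1, $2, ...; runtimePlaceholders is an illustrative name, not part of the codebase:

// Produces one placeholder per element of a list lift and reports how far the placeholder
// index advanced (a single lift is just the liftSize = 1 case).
def runtimePlaceholders(liftingPlaceholder: Int => String, startIndex: Int, liftSize: Int): (Int, String) =
  val expansion = (0 until liftSize).map(i => liftingPlaceholder(startIndex + i)).mkString(", ")
  (liftSize, expansion)

runtimePlaceholders(i => s"$$${i + 1}", 0, 3) // (3, "$1, $2, $3")
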

object Matrowl:
sealed trait Ground:
override def toString = "Gnd"
case object Ground extends Ground
def Bottom = Matrowl(List(), Matrowl.Ground)

/**
* A Matrowl (lit. Matryoshka + Bowl) is essentially a stack where each frame consists of a list of items.
* You can add to the list on the top of the stack, pop the current Matrowl, or stack another one on top of it.
* This data structure became necessary in the token2Expr function when I realized that in the case of
* Work.Token(SetContainsToken(a, op, b @ ScalarTagToken(tag))), the list this tag points to can be empty which means
* that the emptySetContainsToken needs to be expanded instead of the Expr[String] that is returned by the placeholders
* function. The problem, however, is that we only know at runtime whether the list is empty or non-empty.
* This leads us to the requirement to either make token2Expr non-tail-recursive and introduce something like this:
* {{
* case Work.Token(SetContainsToken(a, op, b @ ScalarTagToken(tag))) =>
* '{if (list.length != 0)
* token2Expr(...)
* else
* token2Expr(emptySetContainsToken(a), ...)
* }
* }}
* This of course is no longer tail-recursive and therefore would require a stack-frame for every token
* that needs to be Expr[String]'ed. One possible alternative would be to trampoline the entire execution;
* however, that would likely introduce a significant performance penalty. Instead, a simplification can be made
* in which both variations of the conditional (i.e. the regular expansion and the emptySetContainsToken one)
* are expanded and kept separate in the 'done-pile' of token2Expr in some kind of data structure
* from which they can be picked up later.
* The following sequence of steps therefore emerges when running into a
* `case Work.Token(SetContainsToken(a, op, b @ ScalarTagToken(tag)))` where `tag` is a list lift:
* <li> Take the current done-area of token2Expr and stack a new matrowl above it
* <li> Process all the tokens that would be needed to apply a emptySetContainsToken tokenization
* <li> Add yet another stack frame on top of the matrowl
* <li> Process all the tokens that would be needed to apply a regular tokenization of the list
* i.e. `stmt"$a $op (") :: Work.AlreadyDone(liftsExpr) :: Work.Token(stmt")")` etc... and place them
* onto the Matrowl we just created.
* <li> Pop the two created stack frames into groups (one, two) and splice them into `'{ if (list.length != 0) {one} else {two} }`;
* note that they will come out in the opposite order from which they were put in.
*/
case class Matrowl private (doneWorks: List[Expr[String]], below: Matrowl | Matrowl.Ground):
def dropIn(doneWork: Expr[String]): Matrowl =
// println(s"Dropping: ${Format.Expr(doneWork)} into ${this.toString}")
this.copy(doneWorks = doneWork +: this.doneWorks)
def stack: Matrowl =
// println(s"Stack New Matrowl ():=> ${this.toString}")
Matrowl(List(), this)
def pop: (List[Expr[String]], Matrowl) =
// println(s"Pop Top Matrowl: ${this.toString}")
below match
case m: Matrowl => (doneWorks, m)
case e: Matrowl.Ground => report.throwError("Tokenization error, attempted to pop a bottom-level element")
def pop2: (List[Expr[String]], List[Expr[String]], Matrowl) =
// println(s"Pop Two Matrowls...")
val (one, firstBelow) = pop
val (two, secondBelow) = firstBelow.pop
(one, two, secondBelow)
def isBottom: Boolean =
below match
case m: Matrowl => false
case e: Matrowl.Ground => true
def scoop: List[Expr[String]] =
// println(s"Scoop From Matrowl: ${this.toString}")
doneWorks
override def toString = s"(${doneWorks.map(Format.Expr(_)).mkString(", ")}) -> ${below.toString}"
end Matrowl
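
A minimal standalone model of the Matrowl described in the scaladoc above; Bowl is a hypothetical stand-in that is generic in its element type instead of holding Expr[String] values:

// A stack of frames, each frame being a list: drop items into the top frame,
// push a new empty frame, or pop the top two frames at once.
case class Bowl[A](top: List[A], below: Option[Bowl[A]] = None):
  def dropIn(a: A): Bowl[A] = copy(top = a :: top)
  def stack: Bowl[A] = Bowl(Nil, Some(this))
  def pop: (List[A], Bowl[A]) =
    below match
      case Some(b) => (top, b)
      case None    => sys.error("attempted to pop the bottom frame")
  def pop2: (List[A], List[A], Bowl[A]) =
    val (one, first) = pop
    val (two, second) = first.pop
    (one, two, second)

// Mirrors the SetContainsToken case: push the "empty list" variant, then the "non-empty" variant;
// pop2 returns them in the reverse of the order they were pushed.
val m = Bowl[String](Nil).stack.dropIn("empty-variant").stack.dropIn("non-empty-variant")
val (nonEmptyWork, emptyWork, rest) = m.pop2
// nonEmptyWork == List("non-empty-variant"), emptyWork == List("empty-variant")
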

enum Work:
case AlreadyDone(expr: Expr[String])
case Token(token: io.getquill.idiom.Token)
// Stack the Matrowl
case Stack
// Pop the Matrowl
case Pop2(finished: (Expr[String], Expr[String]) => Expr[String])
object Work:
def StackL = List(Work.Stack)

extension (stringExprs: Seq[Expr[String]])
def mkStringExpr = stringExprs.foldLeft(Expr(""))((concatonation, nextExpr) => '{ $concatonation + $nextExpr })

def token2Expr(token: Token): Expr[String] = {
@tailrec
def apply(
workList: List[Work],
matrowl: Matrowl,
placeholderCount: Expr[Int] // I.e. the index of the '?' that is inserted in the query (that represents a lift) or the $N if an actual number is used (e.g. in the H2 context)
): Expr[String] = workList match {
case Nil =>
if (!matrowl.isBottom)
report.throwError("Did not get to the bottom of the stack while tokenizing")
matrowl.scoop.reverse.mkStringExpr
case head :: tail =>
head match {
case Work.Stack => apply(tail, matrowl.stack, placeholderCount)
case Work.Pop2(finished) =>
// we expect left := workIfListNotEmpty and right := workIfListEmpty
// this is the logical completion of the SetContainsToken(a, op, ScalarTagToken(tag)) case
// (note that these should come off in reversed order from the one they were put in)
val (left, right, restOfMatrowl) = matrowl.pop2
val finishedExpr = finished(left.reverse.mkStringExpr, right.reverse.mkStringExpr)
apply(tail, restOfMatrowl.dropIn(finishedExpr), placeholderCount)

case Work.AlreadyDone(expr) => apply(tail, matrowl.dropIn(expr), placeholderCount)
case Work.Token(StringToken(s2)) => apply(tail, matrowl.dropIn(Expr(s2)), placeholderCount)
case Work.Token(SetContainsToken(a, op, b @ ScalarTagToken(tag))) =>
val (liftsLength, liftsExpr, liftChoice) = placeholders(tag.uid, placeholderCount)
liftChoice match
// If it is a list that could be empty, we have to create a branch structure that will expand
// both variants of that using the Matrowl nested structure
case LiftChoice.ListLift(_) =>
val workIfListNotEmpty = Work.Token(stmt"$a $op (") :: Work.AlreadyDone(liftsExpr) :: Work.Token(stmt")") :: Nil
val workIfListEmpty = List(Work.Token(emptySetContainsToken(a)))
val complete =
(workIfListNotEmpty: Expr[String], workIfListEmpty: Expr[String]) =>
'{
if ($liftsLength != 0) $workIfListNotEmpty else $workIfListEmpty
}
val work = Work.StackL ::: workIfListEmpty ::: Work.StackL ::: workIfListNotEmpty ::: List(Work.Pop2(complete))
// println(s"** Push Two Variants ** - \nWork is: ${work}\nTail is: ${tail}")
// We can splice the liftsLength even if we're not splicing in the array itself (i.e. in cases
// where we're splicing the empty token). That's fine since when we're splicing the empty token, the
// array length is zero.
apply(work ::: tail, matrowl, '{ $placeholderCount + $liftsLength })

// Otherwise it's just a regular scalar-token expansion
case _ =>
// println(s"** Push One Variant ** - \nWork is: ${stmt"$a $op ($b)"}\nTail is: ${tail}")
apply(Work.Token(stmt"$a $op ($b)") +: tail, matrowl, placeholderCount)

// The next two variants cannot be a list operation now since that was handled in the
// Work.Token(SetContainsToken(a, op, b @ ScalarTagToken(tag))) case above
// They can be set-operations on a lift but not one that can be empty
case Work.Token(SetContainsToken(a, op, b)) =>
apply(Work.Token(stmt"$a $op ($b)") +: tail, matrowl, placeholderCount)
case Work.Token(ScalarTagToken(tag)) =>
val (liftsLength, liftsExpr, _) = placeholders(tag.uid, placeholderCount)
apply(tail, matrowl.dropIn(liftsExpr), '{ $placeholderCount + $liftsLength })

case Work.Token(Statement(tokens)) =>
apply(tokens.map(Work.Token(_)) ::: tail, matrowl, placeholderCount)
case Work.Token(_: ScalarLiftToken) =>
throw new UnsupportedOperationException("Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Lift Tokens.")
case Work.Token(_: QuotationTagToken) =>
throw new UnsupportedOperationException("Quotation Tags must be resolved before a reification.")
}
}
apply(List(Work.Token(token)), Matrowl.Bottom, Expr(0))
}
token2Expr(statement)
}
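// Rather than assembling the SQL string at compile time, the Unparticular.Query and its planters
// are lifted into the generated code and the actual token splicing is delegated to
// Particularize.Dynamic at runtime.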
val liftsExpr: Expr[List[Planter[?, ?, ?]]] = Expr.ofList(lifts)
val queryExpr: Expr[Unparticular.Query] = UnparticularQueryLiftable(query)
'{ Dynamic[PrepareRowTemp]($queryExpr, $liftsExpr, $runtimeLiftingPlaceholder, $emptySetContainsToken) }
end Static

object Dynamic: