fix(compiler): Refactor header semantics [LNG-352] (#1113)
* Initial

* Fix import

* Refactor RawSemantics

* Refactor RawSemantics

* Remove initCtx for export

* Add fromInit

* Remove setInit

* Use init ctx

* Add a hack to ModuleSem

* Remove initCtx

* Refactor HeaderSem finalization

* Remove RawContext#init

* Combine with semigroup op

* Remove unnecessary typeclass

* Remove unused typeclass

* Remove unnecessary typeclass

* Remove unnecessary monoid

* Remove constants from monoid

* Fix tests

* Remove unused monoids

* Refactor declares

* Refactor module sem

* Refactor module sem

* Update LspSemantics

* Remove unused typeclass

* Remove unnecessary method

* Add comments
InversionSpaces authored Apr 9, 2024
1 parent 07bea1a commit f29e44e
Showing 22 changed files with 219 additions and 402 deletions.
9 changes: 5 additions & 4 deletions compiler/src/main/scala/aqua/compiler/AquaCompiler.scala
@@ -6,15 +6,16 @@ import aqua.parser.{Ast, ParserError}
import aqua.semantics.header.Picker.setImportPaths
import aqua.semantics.header.{HeaderHandler, Picker}
import aqua.semantics.{FileId, SemanticError, Semantics}

import cats.arrow.FunctionK
import cats.data.*
import cats.syntax.either.*
import cats.syntax.functor.*
import cats.syntax.show.*
import cats.{~>, Comonad, Monad, Monoid, Order, Show}
import cats.{Comonad, Monad, Monoid, Order, Show, ~>}
import scribe.Logging

class AquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad, C: Monoid: Picker](
class AquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad, C: Picker](
headerHandler: HeaderHandler[S, C],
semantics: Semantics[S, C]
) extends Logging {
@@ -38,11 +39,11 @@ class AquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad, C: Monoid: Picker](
// Analyze the body, with prepared initial context
_ = logger.trace("semantic processing...")
processed <- semantics
.process(body, headerSem.initCtx)
.process(body, headerSem.init)
.toCompileRes
// Handle exports, declares - finalize the resulting context
rc <- headerSem
.finCtx(processed)
.fin(processed)
.toCompileRes
} yield rc.setImportPaths(importPaths)
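
In short, the header-semantics handles were renamed: the prepared initial context is now `headerSem.init` and finalization is `headerSem.fin`. Below is a minimal sketch of that flow under simplified types — `HeaderSemSketch`, `processBody`, and the `Either[String, *]` error channel are illustrative stand-ins, not aqua's actual signatures:

```scala
// Illustrative stand-in for aqua.semantics.header.HeaderSem after the rename;
// the context type C and the Either error channel are simplifications.
final case class HeaderSemSketch[C](
  init: C,                    // initial context prepared from the header
  fin: C => Either[String, C] // finalization: handle exports and declares
)

def compileBody[C](
  headerSem: HeaderSemSketch[C],
  processBody: C => Either[String, C] // stand-in for the body semantics
): Either[String, C] =
  for {
    processed <- processBody(headerSem.init) // analyze the body with the prepared context
    finalized <- headerSem.fin(processed)    // finalize the resulting context
  } yield finalized
```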

18 changes: 4 additions & 14 deletions compiler/src/main/scala/aqua/compiler/CompilerAPI.scala
@@ -44,24 +44,14 @@ object CompilerAPI extends Logging {
private def getAquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad](
config: AquaCompilerConf
): AquaCompiler[F, E, I, S, RawContext] = {
given Monoid[RawContext] = RawContext
.implicits(
RawContext.blank.copy(
parts = Chain
.fromSeq(config.constants ++ ConstantRaw.defaultConstants(config.relayVarName))
.map(const => RawContext.blank -> const)
)
)
.rawContextMonoid

val semantics = new RawSemantics[S]()

given LocationsAlgebra[S, State[RawContext, *]] =
DummyLocationsInterpreter()

new AquaCompiler[F, E, I, S, RawContext](
val constants = config.constants ++ ConstantRaw.defaultConstants(config.relayVarName)

new AquaCompiler(
new HeaderHandler(),
semantics
new RawSemantics(constants)
)
}
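
The net effect of this hunk is that default constants no longer live inside a hand-built Monoid[RawContext]; they are computed once and handed to RawSemantics as a constructor argument. A rough sketch of that wiring with placeholder types (not aqua's real ones):

```scala
// Placeholder types for illustration; not aqua's actual API.
final case class Constant(name: String)

final case class Ctx(parts: List[Constant]) {
  def add(more: List[Constant]): Ctx = Ctx(parts ++ more)
}

// Constants arrive through the constructor and are added to the blank initial
// context when processing starts, instead of living in a Monoid's `empty`.
final class SemanticsSketch(constants: List[Constant]) {
  def process(init: Ctx): Ctx = init.add(constants)
}

val defaultConstants = List(Constant("RELAY"), Constant("INIT_PEER_ID")) // hypothetical names
val semantics = new SemanticsSketch(defaultConstants)
```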

LSPCompiler.scala
@@ -20,41 +20,14 @@ object LSPCompiler {
private def getLspAquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad](
config: AquaCompilerConf
): AquaCompiler[F, E, I, S, LspContext[S]] = {
given Monoid[LspContext[S]] = LspContext
.implicits(
LspContext.blank.copy(raw =
RawContext.blank.copy(
parts = Chain
.fromSeq(config.constants ++ ConstantRaw.defaultConstants(config.relayVarName))
.map(const => RawContext.blank -> const)
)
)
)
.lspContextMonoid

given Monoid[HeaderSem[S, LspContext[S]]] with {
override def empty: HeaderSem[S, LspContext[S]] =
HeaderSem(Monoid[LspContext[S]].empty, (c, _) => validNec(c))

override def combine(
a: HeaderSem[S, LspContext[S]],
b: HeaderSem[S, LspContext[S]]
): HeaderSem[S, LspContext[S]] = {
HeaderSem(
a.initCtx |+| b.initCtx,
(c, i) => a.finInitCtx(c, i).andThen(b.finInitCtx(_, i))
)
}
}

val semantics = new LspSemantics[S]()

given LocationsAlgebra[S, State[LspContext[S], *]] =
LocationsInterpreter[S, LspContext[S]]()

new AquaCompiler[F, E, I, S, LspContext[S]](
new HeaderHandler(),
semantics
val constants = config.constants ++ ConstantRaw.defaultConstants(config.relayVarName)

new AquaCompiler(
headerHandler = new HeaderHandler(),
semantics = new LspSemantics(constants)
)
}
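
The given Monoid[HeaderSem[S, LspContext[S]]] removed here merged header semantics by combining the initial contexts and chaining the finalizers; after this change that logic no longer needs a per-compiler typeclass. A generic sketch of what the removed instance computed (context and error types simplified, names are not aqua's):

```scala
import cats.Monoid
import cats.data.ValidatedNec
import cats.syntax.all.*

// `Ctx` stands in for LspContext[S]; errors are simplified to String.
final case class HeaderSemPart[Ctx](
  init: Ctx,
  fin: Ctx => ValidatedNec[String, Ctx]
)

def combineHeaderSem[Ctx: Monoid](
  a: HeaderSemPart[Ctx],
  b: HeaderSemPart[Ctx]
): HeaderSemPart[Ctx] =
  HeaderSemPart(
    a.init |+| b.init,           // merge the header-derived contexts
    c => a.fin(c).andThen(b.fin) // run the finalizers one after the other
  )
```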

LspContext.scala
@@ -3,10 +3,10 @@ package aqua.lsp
import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token}
import aqua.raw.{RawContext, RawPart}
import aqua.semantics.header.Picker
import aqua.semantics.rules.locations.LocationsState
import aqua.semantics.rules.locations.{TokenLocation, VariableInfo}
import aqua.semantics.{SemanticError, SemanticWarning}
import aqua.types.{AbilityType, ArrowType, Type}
import aqua.semantics.rules.locations.LocationsState

import cats.syntax.monoid.*
import cats.{Monoid, Semigroup}
@@ -32,8 +32,10 @@ object LspContext {

def blank[S[_]]: LspContext[S] = LspContext[S](raw = RawContext())

given [S[_]]: Semigroup[LspContext[S]] =
(x: LspContext[S], y: LspContext[S]) =>
given [S[_]]: Monoid[LspContext[S]] with {
override def empty = blank[S]

override def combine(x: LspContext[S], y: LspContext[S]) =
LspContext[S](
raw = x.raw |+| y.raw,
abDefinitions = x.abDefinitions ++ y.abDefinitions,
@@ -45,21 +47,6 @@ object LspContext {
warnings = x.warnings ++ y.warnings,
importPaths = x.importPaths ++ y.importPaths
)

trait Implicits[S[_]] {
val lspContextMonoid: Monoid[LspContext[S]]
}

def implicits[S[_]](init: LspContext[S]): Implicits[S] = new Implicits[S] {

override val lspContextMonoid: Monoid[LspContext[S]] = new Monoid[LspContext[S]] {
override def empty: LspContext[S] = init

override def combine(x: LspContext[S], y: LspContext[S]): LspContext[S] = {
Semigroup[LspContext[S]].combine(x, y)
}
}

}

given [S[_]]: Picker[LspContext[S]] with {
@@ -85,13 +72,11 @@ object LspContext {
override def addPart(ctx: LspContext[S], part: (LspContext[S], RawPart)): LspContext[S] =
ctx.copy(raw = ctx.raw.addPart(part._1.raw -> part._2))

override def setInit(ctx: LspContext[S], ctxInit: Option[LspContext[S]]): LspContext[S] =
ctx.copy(raw = ctx.raw.setInit(ctxInit.map(_.raw)))

override def all(ctx: LspContext[S]): Set[String] =
ctx.raw.all
override def module(ctx: LspContext[S]): Option[String] = ctx.raw.module
override def declares(ctx: LspContext[S]): Set[String] = ctx.raw.declares

override def declaredNames(ctx: LspContext[S]): Set[String] = ctx.raw.declaredNames

override def allNames(ctx: LspContext[S]): Set[String] = ctx.raw.allNames

override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] =
ctx.copy(
@@ -103,15 +88,23 @@ )
)
)

override def setImportPaths(ctx: LspContext[S], importPaths: Map[String, String]): LspContext[S] =
override def setImportPaths(
ctx: LspContext[S],
importPaths: Map[String, String]
): LspContext[S] =
ctx.copy(importPaths = importPaths)

override def setModule(
ctx: LspContext[S],
name: Option[String],
name: String
): LspContext[S] =
ctx.copy(raw = ctx.raw.setModule(name))

override def setDeclares(
ctx: LspContext[S],
declares: Set[String]
): LspContext[S] =
ctx.copy(raw = ctx.raw.setOptModule(name, declares))
ctx.copy(raw = ctx.raw.setDeclares(declares))

override def setExports(
ctx: LspContext[S],
@@ -154,22 +147,20 @@

override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ctx.raw.pickHeader)

override def pickDeclared(
ctx: LspContext[S]
)(using Semigroup[LspContext[S]]): LspContext[S] =
override def pickDeclared(ctx: LspContext[S]): LspContext[S] =
ctx.copy(raw = ctx.raw.pickDeclared)
}

/*
NOTE: This instance is used to generate LocationsAlgebra[S, State[LspContext[S], *]]
to reuse the code from the body semantics in the header semantics
to reuse the code from the body semantics in the header semantics
*/
given [S[_]]: Lens[LspContext[S], LocationsState[S]] = {
val get: LspContext[S] => LocationsState[S] =
val get: LspContext[S] => LocationsState[S] =
ctx => LocationsState(ctx.variables)
val replace: LocationsState[S] => LspContext[S] => LspContext[S] =
locs => ctx => ctx.copy(variables = locs.variables)

Lens(get)(replace)
}
}
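
The Lens[LspContext[S], LocationsState[S]] defined just above is what lets the header semantics reuse code written against the body-semantics state: a State program over the inner LocationsState can be run against the outer LspContext by zooming through the lens. A small illustrative helper showing that mechanism (the `zoom` function itself is assumed, not part of this diff):

```scala
import cats.data.State
import monocle.Lens

// Run a State program written against the inner state on the outer state.
def zoom[Outer, Inner, A](lens: Lens[Outer, Inner])(
  program: State[Inner, A]
): State[Outer, A] =
  State { outer =>
    val (inner, a) = program.run(lens.get(outer)).value
    (lens.replace(inner)(outer), a)
  }
```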
LspSemantics.scala
@@ -3,6 +3,8 @@ package aqua.lsp
import aqua.parser.Ast
import aqua.parser.head.{ImportExpr, ImportFromExpr, UseExpr, UseFromExpr}
import aqua.parser.lexer.{LiteralToken, Token}
import aqua.raw.ConstantRaw
import aqua.semantics.header.Picker.*
import aqua.semantics.rules.locations.LocationsState
import aqua.semantics.{CompilerState, RawSemantics, SemanticError, SemanticWarning, Semantics}

@@ -18,7 +20,9 @@ import cats.syntax.reducible.*
import monocle.Lens
import monocle.macros.GenLens

class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
class LspSemantics[S[_]](
constants: List[ConstantRaw] = Nil
) extends Semantics[S, LspContext[S]] {

private def getImportTokens(ast: Ast[S]): List[LiteralToken[S]] =
ast.head.collect {
@@ -38,11 +42,12 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
init: LspContext[S]
): ProcessResult = {

val rawState = CompilerState.init[S](init.raw)
val withConstants = init.addFreeParts(constants)
val rawState = CompilerState.init[S](withConstants.raw)

val initState = rawState.copy(
locations = rawState.locations.copy(
variables = rawState.locations.variables ++ init.variables
variables = rawState.locations.variables ++ withConstants.variables
)
)

@@ -55,7 +60,8 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
new LocationsInterpreter[S, CompilerState[S]]()

RawSemantics
.interpret(ast, initState, init.raw)
.interpret(ast, withConstants.raw)
.run(initState)
.map { case (state, ctx) =>
LspContext(
raw = ctx,
18 changes: 6 additions & 12 deletions model/raw/src/main/scala/aqua/raw/Raw.scala
@@ -1,6 +1,8 @@
package aqua.raw

import aqua.raw.RawPart.contextPart
import aqua.raw.ops.{FuncOp, RawTag}

import cats.Semigroup
import cats.syntax.semigroup.*

@@ -13,25 +15,17 @@ object Raw {

case class Empty(log: String) extends Raw

given Semigroup[Raw] with

import RawPart.RPSMonoid
import RawPart.contextPart
given Semigroup[Raw] with {

override def combine(x: Raw, y: Raw): Raw =
(x, y) match {
case (l: FuncOp, r: FuncOp) =>
FuncOp(l.tree |+| r.tree)
case (l: FuncOp, r: FuncOp) => FuncOp(l.tree |+| r.tree)

case (l: Empty, r: Empty) => Empty(l.log + " |+| " + r.log)
case (_: Empty, r) => r
case (l, _: Empty) => l

case (l, r) =>
RPSMonoid.combine(
contextPart(l),
contextPart(r)
)

case (l, r) => contextPart(l) |+| contextPart(r)
}
}
}
40 changes: 10 additions & 30 deletions model/raw/src/main/scala/aqua/raw/RawContext.scala
@@ -15,8 +15,6 @@ import scala.collection.immutable.SortedMap
/**
* RawContext is essentially a model of the source code – the first one we get to from the AST.
*
* @param init
* Initial context – collected imports, needed for re-exporting in AquaContext later
* @param module
* This file's module name
* @param declares
@@ -29,7 +27,6 @@
* Abilities (e.g. used contexts) available in the scope
*/
case class RawContext(
init: Option[RawContext] = None,
module: Option[String] = None,
declares: Set[String] = Set.empty,
exports: Map[String, Option[String]] = Map.empty,
@@ -89,16 +86,11 @@ case class RawContext(
lazy val allDefinedAbilities: Map[String, AbilityType] =
all(_.definedAbilities)

def `type`(name: String): Option[StructType] =
NonEmptyMap
.fromMap(
SortedMap.from(
collectPartsMap {
case rp if declares(rp.name) || module.isEmpty => rp.rawPartType
}
)
)
.map(StructType(name, _))
lazy val allNames: Set[String] =
parts.map { case (_, p) => p.name }.toList.toSet

lazy val declaredNames: Set[String] =
allNames.filter(declares.contains)

override def toString: String =
s"""|module: ${module.getOrElse("unnamed")}
@@ -113,29 +105,17 @@ case class RawContext(
object RawContext {
val blank: RawContext = RawContext()

given Semigroup[RawContext] =
(x: RawContext, y: RawContext) =>
given Monoid[RawContext] with {

override def empty: RawContext = blank

override def combine(x: RawContext, y: RawContext) =
RawContext(
x.init.flatMap(xi => y.init.map(xi |+| _)) orElse x.init orElse y.init,
x.module orElse y.module,
x.declares ++ y.declares,
x.exports ++ y.exports,
x.parts ++ y.parts,
x.abilities ++ y.abilities
)

trait Implicits {
val rawContextMonoid: Monoid[RawContext]
}

def implicits(init: RawContext): Implicits = new Implicits {

override val rawContextMonoid: Monoid[RawContext] = new Monoid[RawContext] {
override def empty: RawContext = init

override def combine(x: RawContext, y: RawContext): RawContext =
Semigroup[RawContext].combine(x, y)
}

}
}
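
The deleted Implicits(init) factory produced a Monoid whose empty was whatever init was passed in; unless that init is blank, combine(empty, x) does not return x, so the Monoid identity law generally fails. The new given keeps empty as the truly blank context. A tiny check of the identity laws under a simplified context type (not aqua's RawContext):

```scala
import cats.Monoid
import cats.syntax.all.*

final case class MiniCtx(declares: Set[String])

given Monoid[MiniCtx] with {
  override val empty: MiniCtx = MiniCtx(Set.empty)
  override def combine(x: MiniCtx, y: MiniCtx): MiniCtx =
    MiniCtx(x.declares ++ y.declares)
}

val ctx = MiniCtx(Set("foo"))
// Both identities hold because `empty` carries no data of its own.
assert((Monoid[MiniCtx].empty |+| ctx) == ctx)
assert((ctx |+| Monoid[MiniCtx].empty) == ctx)
```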
