Build with Scala-3 (close #124)
Upgrade libraries
Move shapeless code into scala-2 source directory
Use scala.deriving.Mirror for scala-3 code
hamnis authored and pondzix committed Nov 29, 2023
1 parent 47e9948 commit 347aa08
Showing 14 changed files with 233 additions and 89 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -12,3 +12,7 @@ project/plugins/project/
# Vagrant
.vagrant
VERSION
.bloop
.metals
metals.sbt
.vscode
9 changes: 8 additions & 1 deletion .scalafmt.conf
@@ -20,4 +20,11 @@ rewrite.rules = [
RedundantBraces,
RedundantParens,
PreferCurlyFors
]
]
runner.dialect = scala212

fileOverride {
"glob:**/scala-3/**/*.scala" {
runner.dialect = dotty
}
}
4 changes: 2 additions & 2 deletions build.sbt
@@ -18,8 +18,8 @@ lazy val root = project
name := "snowplow-scala-analytics-sdk",
organization := "com.snowplowanalytics",
description := "Scala analytics SDK for Snowplow",
scalaVersion := "2.13.3",
crossScalaVersions := Seq("2.12.11", "2.13.3")
scalaVersion := "2.13.10",
crossScalaVersions := Seq("2.12.17", "2.13.10", "3.2.1")
)
)
.enablePlugins(SiteScaladocPlugin)
60 changes: 37 additions & 23 deletions project/BuildSettings.scala
@@ -35,15 +35,23 @@ object BuildSettings {

// Basic settings for our app
lazy val buildSettings = Seq(
scalacOptions := Seq(
scalacOptions ++= Seq(
"-deprecation",
"-encoding", "UTF-8",
"-feature",
"-unchecked",
"-Ywarn-dead-code",
"-Ywarn-numeric-widen",
"-Ywarn-value-discard"
)
"-unchecked"
),
scalacOptions ++= {
if (scalaVersion.value.startsWith("3")) {
Seq("-Xmax-inlines", "150")
} else {
Seq(
"-Ywarn-dead-code",
"-Ywarn-numeric-widen",
"-Ywarn-value-discard"
)
}
}
)

lazy val dynVerSettings = Seq(
@@ -67,40 +75,46 @@
)
)

// If new version introduces breaking changes,
// clear-out mimaBinaryIssueFilters and mimaPreviousVersions.
// Otherwise, add previous version to set without
// removing other versions.
val mimaPreviousVersions = Set()

val mimaSettings = Seq(
mimaPreviousArtifacts := mimaPreviousVersions.map { organization.value %% name.value % _ },
// If new version introduces breaking changes, clear out the lists of previous version.
// Otherwise, add previous version to set without removing other versions.
val mimaPreviousVersionsScala2 = Set("3.0.1")
val mimaPreviousVersionsScala3 = Set()
lazy val mimaSettings = Seq(
mimaPreviousArtifacts := {
val versionsForBuild =
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((3, _)) =>
mimaPreviousVersionsScala3
case _ =>
mimaPreviousVersionsScala2
}

versionsForBuild.map { organization.value %% name.value % _ }
},
ThisBuild / mimaFailOnNoPrevious := false,
mimaBinaryIssueFilters ++= Seq(),
Test / test := {
mimaReportBinaryIssues.value
(Test / test).value
}
Test / test := (Test / test).dependsOn(mimaReportBinaryIssues).value
)

val scoverageSettings = Seq(
coverageMinimum := 50,
coverageMinimumStmtTotal := 50,
// Excluded because of shapeless, which would generate 1000x500KB statements driving coverage OOM
coverageExcludedFiles := """.*\/Event.*;""",
coverageFailOnMinimum := true,
coverageHighlighting := false,
(test in Test) := {
(coverageReport dependsOn (test in Test)).value
(Test / test) := {
(coverageReport dependsOn (Test / test)).value
}
)

lazy val sbtSiteSettings = Seq(
siteSubdirName in SiteScaladoc := s"${version.value}",
preprocessVars in Preprocess := Map("VERSION" -> version.value)
SiteScaladoc / siteSubdirName := s"${version.value}",
Preprocess / preprocessVars := Map("VERSION" -> version.value)
)

lazy val formattingSettings = Seq(
scalafmtConfig := file(".scalafmt.conf"),
scalafmtOnCompile := true
)

}
10 changes: 5 additions & 5 deletions project/Dependencies.scala
@@ -15,12 +15,12 @@ import sbt._
object Dependencies {

object V {
val igluCore = "1.0.0"
val cats = "2.1.1"
val circe = "0.14.1"
val igluCore = "1.1.3"
val cats = "2.9.0"
val circe = "0.14.3"
// Scala (test only)
val specs2 = "4.8.0"
val scalaCheck = "1.15.0"
val specs2 = "4.19.0"
val scalaCheck = "1.17.0"
}

val igluCore = "com.snowplowanalytics" %% "iglu-core-circe" % V.igluCore
2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
sbt.version=1.4.1
sbt.version=1.8.2
11 changes: 6 additions & 5 deletions project/plugins.sbt
@@ -1,10 +1,11 @@
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.5.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.4.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0")
addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.1.1")
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1")
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1")

libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always
@@ -64,7 +64,7 @@ object Parser {
): Parser[A] =
new Parser[A] {
type HTSV = L
val knownKeys: List[Symbol] = keys.apply.toList[Symbol]
val knownKeys: List[Symbol] = keys().toList[Symbol]
val decoder: RowDecoder[L] = rowDecoder
val generic: Generic.Aux[A, L] = gen
}
@@ -0,0 +1,49 @@
/*
* Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.analytics.scalasdk.decode

import shapeless._
import cats.syntax.validated._
import cats.syntax.either._
import cats.syntax.apply._
import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo.UnhandledRowDecodingError

private[scalasdk] trait RowDecoderCompanion {
import HList.ListCompat._

def apply[L <: HList](implicit fromRow: RowDecoder[L]): RowDecoder[L] = fromRow

def fromFunc[L <: HList](f: List[(Key, String)] => RowDecodeResult[L]): RowDecoder[L] =
new RowDecoder[L] {
def apply(row: List[(Key, String)]) = f(row)
}

/** Parse TSV row into HList */
private def parse[H: ValueDecoder, T <: HList: RowDecoder](row: List[(Key, String)]): RowDecodeResult[H :: T] =
row match {
case h :: t =>
val hv: RowDecodeResult[H] = ValueDecoder[H].parse(h).toValidatedNel
val tv: RowDecodeResult[T] = RowDecoder[T].apply(t)
(hv, tv).mapN(_ :: _)
case Nil => UnhandledRowDecodingError("Not enough values, format is invalid").invalidNel
}

implicit def hnilFromRow: RowDecoder[HNil] =
fromFunc {
case Nil => HNil.validNel
case rows => UnhandledRowDecodingError(s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}").invalidNel
}

implicit def hconsFromRow[H: ValueDecoder, T <: HList: RowDecoder]: RowDecoder[H :: T] =
fromFunc(row => parse(row))
}
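For readers less familiar with the shapeless encoding retained for Scala 2 above, here is a minimal, self-contained sketch of the same induction. The names (CellDecoder, RowDec, Counter) are hypothetical and the errors use plain Either instead of the SDK's ValidatedNel: an instance for HNil plus an instance for H :: T lets the compiler assemble a decoder for any case class through Generic, which is how the companion above is consumed on Scala 2.

// Illustrative sketch only (assumed names, not part of this commit): decode a row of
// cells into an HList by induction, then map it onto a case class via Generic.
import shapeless.{::, Generic, HList, HNil}

trait CellDecoder[A] { def decode(s: String): Either[String, A] }
object CellDecoder {
  implicit val string: CellDecoder[String] = (s: String) => Right(s)
  implicit val int: CellDecoder[Int] = (s: String) =>
    scala.util.Try(s.toInt).toEither.left.map(_ => s"'$s' is not an Int")
}

trait RowDec[L <: HList] { def decode(cells: List[String]): Either[String, L] }
object RowDec {
  // Base case: an empty row decodes to HNil, anything left over is an error
  implicit val hnil: RowDec[HNil] = (cells: List[String]) =>
    if (cells.isEmpty) Right(HNil) else Left(s"Unexpected trailing cells: $cells")

  // Inductive case: decode the head cell, then recurse on the tail
  implicit def hcons[H, T <: HList](implicit head: CellDecoder[H], tail: RowDec[T]): RowDec[H :: T] =
    (cells: List[String]) =>
      cells match {
        case cell :: rest => for { h <- head.decode(cell); t <- tail.decode(rest) } yield h :: t
        case Nil          => Left("Not enough cells")
      }

  /** Decode a whole row into a case class, analogous to Parser.deriveFor on Scala 2. */
  def as[A]: PartiallyApplied[A] = new PartiallyApplied[A]
  final class PartiallyApplied[A] {
    def apply[L <: HList](cells: List[String])(implicit gen: Generic.Aux[A, L], dec: RowDec[L]): Either[String, A] =
      dec.decode(cells).map(gen.from)
  }
}

// final case class Counter(name: String, count: Int)
// RowDec.as[Counter](List("events", "42"))   // Right(Counter("events", 42))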
@@ -0,0 +1,50 @@
/*
* Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.analytics.scalasdk.decode

import cats.data.{NonEmptyList, Validated}
import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.{FieldNumberMismatch, NotTSV, RowDecodingError}
import scala.deriving._
import scala.compiletime._

private[scalasdk] trait Parser[A] extends Serializable {

/** List of field names defined on `A` */
def knownKeys: List[Key] // TODO: should be a part of `RowDecoder`

protected def decoder: RowDecoder[A]

def parse(row: String): DecodeResult[A] = {
val values = row.split("\t", -1)
if (values.length == 1) Validated.Invalid(NotTSV)
else if (values.length != knownKeys.length) Validated.Invalid(FieldNumberMismatch(values.length))
else {
val zipped = knownKeys.zip(values)
decoder(zipped).leftMap(e => RowDecodingError(e))
}
}
}

object Parser {
sealed trait DeriveParser[A] {
inline def get(implicit mirror: Mirror.ProductOf[A]): Parser[A] =
new Parser[A] {
val knownKeys: List[Symbol] = constValueTuple[mirror.MirroredElemLabels].toArray.map(s => Symbol(s.toString)).toList
val decoder: RowDecoder[A] = RowDecoder.of[A]
}
}

/** Derive a TSV parser for `A` */
private[scalasdk] def deriveFor[A]: DeriveParser[A] =
new DeriveParser[A] {}
}
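A rough usage sketch of the Mirror-based derivation above. The ExampleEvent case class is hypothetical and not part of this commit; deriveFor is private[scalasdk], so this only compiles inside the com.snowplowanalytics.snowplow.analytics.scalasdk package, and it assumes ValueDecoder instances for every field type are in scope.

// Hypothetical usage (assumed names), inside the scalasdk package, assuming
// ValueDecoder instances exist for Option[String] and Option[Int].
final case class ExampleEvent(appId: Option[String], txnId: Option[Int])

// Mirror.ProductOf[ExampleEvent] is synthesized by the compiler for any case class,
// giving `get` the field labels (for knownKeys) and field types (for the RowDecoder).
val exampleParser: Parser[ExampleEvent] = Parser.deriveFor[ExampleEvent].get

// exampleParser.parse("app\t42") then yields a DecodeResult[ExampleEvent]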
@@ -0,0 +1,54 @@
/*
* Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.analytics.scalasdk.decode

import cats.syntax.validated._
import cats.syntax.either._
import cats.syntax.apply._
import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo.UnhandledRowDecodingError
import scala.deriving._
import scala.compiletime._

private[scalasdk] trait RowDecoderCompanion {
def tupled[L <: Tuple](implicit fromRow: RowDecoder[L]): RowDecoder[L] = fromRow

def fromFunc[L <: Tuple](f: List[(Key, String)] => RowDecodeResult[L]): RowDecoder[L] =
new RowDecoder[L] {
def apply(row: List[(Key, String)]) = f(row)
}

/** Parse TSV row into HList */
private def parse[H: ValueDecoder, T <: Tuple: RowDecoder](row: List[(Key, String)]): RowDecodeResult[H *: T] =
row match {
case h :: t =>
val hv: RowDecodeResult[H] = ValueDecoder[H].parse(h).toValidatedNel
val tv: RowDecodeResult[T] = RowDecoder.tupled[T].apply(t)
(hv, tv).mapN(_ *: _)
case Nil => UnhandledRowDecodingError("Not enough values, format is invalid").invalidNel
}

implicit def hnilFromRow: RowDecoder[EmptyTuple] =
fromFunc {
case Nil => EmptyTuple.validNel
case rows => UnhandledRowDecodingError(s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}").invalidNel
}

implicit def hconsFromRow[H: ValueDecoder, T <: Tuple: RowDecoder]: RowDecoder[H *: T] =
fromFunc(row => parse(row))

inline def of[A](implicit m: Mirror.ProductOf[A]) = {
val instance = summonInline[RowDecoder[m.MirroredElemTypes]]
instance.map(tuple => m.fromTuple(tuple))
}

}
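The two compile-time building blocks the file above leans on can be shown in isolation. In this minimal, self-contained sketch (the Coordinates type is hypothetical, independent of the SDK), constValueTuple materialises the field-name labels exposed by a Mirror, and the Mirror rebuilds a value from a product of field values, which is what DeriveParser.get and RowDecoder.of do with the decoded tuple.

// Stand-alone illustration (assumed names, not part of this commit) of
// scala.deriving.Mirror + scala.compiletime.constValueTuple as used above.
import scala.deriving.Mirror
import scala.compiletime.constValueTuple

final case class Coordinates(lat: Double, lon: Double)

// Recover the case-class field names at compile time, as DeriveParser.get does for knownKeys
inline def fieldNames[A](using m: Mirror.ProductOf[A]): List[String] =
  constValueTuple[m.MirroredElemLabels].toArray.map(_.toString).toList

@main def mirrorDemo(): Unit =
  val m = summon[Mirror.ProductOf[Coordinates]]
  println(fieldNames[Coordinates])        // List(lat, lon)
  println(m.fromProduct((51.5, -0.12)))   // Coordinates(51.5,-0.12)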
@@ -29,7 +29,7 @@ object ParsingError {
/**
* Represents an error indicating a non-TSV line.
*/
final case object NotTSV extends ParsingError
case object NotTSV extends ParsingError

/**
* Represents an error indicating the number of actual fields is not equal
@@ -105,7 +105,7 @@ object ParsingError {
cursor
.downField("message")
.as[String]
.map(UnhandledRowDecodingError)
.map(UnhandledRowDecodingError(_))
}
} yield result
}
@@ -138,12 +138,12 @@ object ParsingError {
cursor
.downField("fieldCount")
.as[Int]
.map(FieldNumberMismatch)
.map(FieldNumberMismatch(_))
case "RowDecodingError" =>
cursor
.downField("errors")
.as[NonEmptyList[RowDecodingErrorInfo]]
.map(RowDecodingError)
.map(RowDecodingError(_))
case _ =>
DecodingFailure(s"Error type $error is not an Analytics SDK Parsing Error.", cursor.history).asLeft
}