diff --git a/.gitignore b/.gitignore index c9923ed..401fbda 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,7 @@ project/plugins/project/ # Vagrant .vagrant VERSION +.bloop +.metals +metals.sbt +.vscode diff --git a/.scalafmt.conf b/.scalafmt.conf index 56bec31..bd54bfd 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -20,4 +20,11 @@ rewrite.rules = [ RedundantBraces, RedundantParens, PreferCurlyFors -] \ No newline at end of file +] +runner.dialect = scala212 + +fileOverride { + "glob:**/scala-3/**/*.scala" { + runner.dialect = dotty + } +} diff --git a/build.sbt b/build.sbt index 08ae09f..8b125a3 100644 --- a/build.sbt +++ b/build.sbt @@ -18,8 +18,8 @@ lazy val root = project name := "snowplow-scala-analytics-sdk", organization := "com.snowplowanalytics", description := "Scala analytics SDK for Snowplow", - scalaVersion := "2.13.3", - crossScalaVersions := Seq("2.12.11", "2.13.3") + scalaVersion := "2.13.10", + crossScalaVersions := Seq("2.12.17", "2.13.10", "3.2.1") ) ) .enablePlugins(SiteScaladocPlugin) diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala index fe001f4..2ed3c37 100644 --- a/project/BuildSettings.scala +++ b/project/BuildSettings.scala @@ -35,15 +35,23 @@ object BuildSettings { // Basic settings for our app lazy val buildSettings = Seq( - scalacOptions := Seq( + scalacOptions ++= Seq( "-deprecation", "-encoding", "UTF-8", "-feature", - "-unchecked", - "-Ywarn-dead-code", - "-Ywarn-numeric-widen", - "-Ywarn-value-discard" - ) + "-unchecked" + ), + scalacOptions ++= { + if (scalaVersion.value.startsWith("3")) { + Seq("-Xmax-inlines", "150") + } else { + Seq( + "-Ywarn-dead-code", + "-Ywarn-numeric-widen", + "-Ywarn-value-discard" + ) + } + } ) lazy val dynVerSettings = Seq( @@ -67,40 +75,46 @@ object BuildSettings { ) ) - // If new version introduces breaking changes, - // clear-out mimaBinaryIssueFilters and mimaPreviousVersions. - // Otherwise, add previous version to set without - // removing other versions. 
-  val mimaPreviousVersions = Set()
-
-  val mimaSettings = Seq(
-    mimaPreviousArtifacts := mimaPreviousVersions.map { organization.value %% name.value % _ },
+  // If new version introduces breaking changes, clear out the lists of previous versions.
+  // Otherwise, add previous version to set without removing other versions.
+  val mimaPreviousVersionsScala2 = Set("3.0.1")
+  val mimaPreviousVersionsScala3 = Set()
+  lazy val mimaSettings = Seq(
+    mimaPreviousArtifacts := {
+      val versionsForBuild =
+        CrossVersion.partialVersion(scalaVersion.value) match {
+          case Some((3, _)) =>
+            mimaPreviousVersionsScala3
+          case _ =>
+            mimaPreviousVersionsScala2
+        }
+
+      versionsForBuild.map { organization.value %% name.value % _ }
+    },
     ThisBuild / mimaFailOnNoPrevious := false,
     mimaBinaryIssueFilters ++= Seq(),
-    Test / test := {
-      mimaReportBinaryIssues.value
-      (Test / test).value
-    }
+    Test / test := (Test / test).dependsOn(mimaReportBinaryIssues).value
   )
 
   val scoverageSettings = Seq(
-    coverageMinimum := 50,
+    coverageMinimumStmtTotal := 50,
     // Excluded because of shapeless, which would generate 1000x500KB statements driving coverage OOM
     coverageExcludedFiles := """.*\/Event.*;""",
     coverageFailOnMinimum := true,
     coverageHighlighting := false,
-    (test in Test) := {
-      (coverageReport dependsOn (test in Test)).value
+    (Test / test) := {
+      (coverageReport dependsOn (Test / test)).value
     }
   )
 
   lazy val sbtSiteSettings = Seq(
-    siteSubdirName in SiteScaladoc := s"${version.value}",
-    preprocessVars in Preprocess := Map("VERSION" -> version.value)
+    SiteScaladoc / siteSubdirName := s"${version.value}",
+    Preprocess / preprocessVars := Map("VERSION" -> version.value)
   )
 
   lazy val formattingSettings = Seq(
     scalafmtConfig := file(".scalafmt.conf"),
     scalafmtOnCompile := true
   )
+
 }
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index abd65f7..1cf3437 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -15,12 +15,12 @@ import sbt._
 
 object Dependencies {
 
object V { - val igluCore = "1.0.0" - val cats = "2.1.1" - val circe = "0.14.1" + val igluCore = "1.1.3" + val cats = "2.9.0" + val circe = "0.14.3" // Scala (test only) - val specs2 = "4.8.0" - val scalaCheck = "1.15.0" + val specs2 = "4.19.0" + val scalaCheck = "1.17.0" } val igluCore = "com.snowplowanalytics" %% "iglu-core-circe" % V.igluCore diff --git a/project/build.properties b/project/build.properties index 08e4d79..46e43a9 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.1 +sbt.version=1.8.2 diff --git a/project/plugins.sbt b/project/plugins.sbt index 579d9fd..28f37e9 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,10 +1,11 @@ -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.5.0") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7") addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.4.0") addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.1.1") -addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") \ No newline at end of file +addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") + +libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala b/src/main/scala-2/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala similarity index 97% rename from src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala rename to 
src/main/scala-2/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala index cf13e17..83ee32e 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala +++ b/src/main/scala-2/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala @@ -64,7 +64,7 @@ object Parser { ): Parser[A] = new Parser[A] { type HTSV = L - val knownKeys: List[Symbol] = keys.apply.toList[Symbol] + val knownKeys: List[Symbol] = keys().toList[Symbol] val decoder: RowDecoder[L] = rowDecoder val generic: Generic.Aux[A, L] = gen } diff --git a/src/main/scala-2/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoderCompanion.scala b/src/main/scala-2/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoderCompanion.scala new file mode 100644 index 0000000..c44b453 --- /dev/null +++ b/src/main/scala-2/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoderCompanion.scala @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved. + * + * This program is licensed to you under the Apache License Version 2.0, + * and you may not use this file except in compliance with the Apache License Version 2.0. + * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the Apache License Version 2.0 is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
+ */ +package com.snowplowanalytics.snowplow.analytics.scalasdk.decode + +import shapeless._ +import cats.syntax.validated._ +import cats.syntax.either._ +import cats.syntax.apply._ +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo.UnhandledRowDecodingError + +private[scalasdk] trait RowDecoderCompanion { + import HList.ListCompat._ + + def apply[L <: HList](implicit fromRow: RowDecoder[L]): RowDecoder[L] = fromRow + + def fromFunc[L <: HList](f: List[(Key, String)] => RowDecodeResult[L]): RowDecoder[L] = + new RowDecoder[L] { + def apply(row: List[(Key, String)]) = f(row) + } + + /** Parse TSV row into HList */ + private def parse[H: ValueDecoder, T <: HList: RowDecoder](row: List[(Key, String)]): RowDecodeResult[H :: T] = + row match { + case h :: t => + val hv: RowDecodeResult[H] = ValueDecoder[H].parse(h).toValidatedNel + val tv: RowDecodeResult[T] = RowDecoder[T].apply(t) + (hv, tv).mapN(_ :: _) + case Nil => UnhandledRowDecodingError("Not enough values, format is invalid").invalidNel + } + + implicit def hnilFromRow: RowDecoder[HNil] = + fromFunc { + case Nil => HNil.validNel + case rows => UnhandledRowDecodingError(s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}").invalidNel + } + + implicit def hconsFromRow[H: ValueDecoder, T <: HList: RowDecoder]: RowDecoder[H :: T] = + fromFunc(row => parse(row)) +} diff --git a/src/main/scala-3/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala b/src/main/scala-3/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala new file mode 100644 index 0000000..0c2b077 --- /dev/null +++ b/src/main/scala-3/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved. 
+ * + * This program is licensed to you under the Apache License Version 2.0, + * and you may not use this file except in compliance with the Apache License Version 2.0. + * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the Apache License Version 2.0 is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. + */ +package com.snowplowanalytics.snowplow.analytics.scalasdk.decode + +import cats.data.{NonEmptyList, Validated} +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.{FieldNumberMismatch, NotTSV, RowDecodingError} +import scala.deriving._ +import scala.compiletime._ + +private[scalasdk] trait Parser[A] extends Serializable { + + /** List of field names defined on `A` */ + def knownKeys: List[Key] // TODO: should be a part of `RowDecoder` + + protected def decoder: RowDecoder[A] + + def parse(row: String): DecodeResult[A] = { + val values = row.split("\t", -1) + if (values.length == 1) Validated.Invalid(NotTSV) + else if (values.length != knownKeys.length) Validated.Invalid(FieldNumberMismatch(values.length)) + else { + val zipped = knownKeys.zip(values) + decoder(zipped).leftMap(e => RowDecodingError(e)) + } + } +} + +object Parser { + sealed trait DeriveParser[A] { + inline def get(implicit mirror: Mirror.ProductOf[A]): Parser[A] = + new Parser[A] { + val knownKeys: List[Symbol] = constValueTuple[mirror.MirroredElemLabels].toArray.map(s => Symbol(s.toString)).toList + val decoder: RowDecoder[A] = RowDecoder.of[A] + } + } + + /** Derive a TSV parser for `A` */ + private[scalasdk] def deriveFor[A]: DeriveParser[A] = + new DeriveParser[A] {} +} diff --git 
a/src/main/scala-3/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoderCompanion.scala b/src/main/scala-3/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoderCompanion.scala new file mode 100644 index 0000000..47d5bfc --- /dev/null +++ b/src/main/scala-3/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoderCompanion.scala @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved. + * + * This program is licensed to you under the Apache License Version 2.0, + * and you may not use this file except in compliance with the Apache License Version 2.0. + * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the Apache License Version 2.0 is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
+ */
+package com.snowplowanalytics.snowplow.analytics.scalasdk.decode
+
+import cats.syntax.validated._
+import cats.syntax.either._
+import cats.syntax.apply._
+import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo.UnhandledRowDecodingError
+import scala.deriving._
+import scala.compiletime._
+
+private[scalasdk] trait RowDecoderCompanion {
+  def tupled[L <: Tuple](implicit fromRow: RowDecoder[L]): RowDecoder[L] = fromRow
+
+  def fromFunc[L <: Tuple](f: List[(Key, String)] => RowDecodeResult[L]): RowDecoder[L] =
+    new RowDecoder[L] {
+      def apply(row: List[(Key, String)]) = f(row)
+    }
+
+  /** Parse TSV row into Tuple */
+  private def parse[H: ValueDecoder, T <: Tuple: RowDecoder](row: List[(Key, String)]): RowDecodeResult[H *: T] =
+    row match {
+      case h :: t =>
+        val hv: RowDecodeResult[H] = ValueDecoder[H].parse(h).toValidatedNel
+        val tv: RowDecodeResult[T] = RowDecoder.tupled[T].apply(t)
+        (hv, tv).mapN(_ *: _)
+      case Nil => UnhandledRowDecodingError("Not enough values, format is invalid").invalidNel
+    }
+
+  implicit def hnilFromRow: RowDecoder[EmptyTuple] =
+    fromFunc {
+      case Nil => EmptyTuple.validNel
+      case rows => UnhandledRowDecodingError(s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}").invalidNel
+    }
+
+  implicit def hconsFromRow[H: ValueDecoder, T <: Tuple: RowDecoder]: RowDecoder[H *: T] =
+    fromFunc(row => parse(row))
+
+  inline def of[A](implicit m: Mirror.ProductOf[A]) = {
+    val instance = summonInline[RowDecoder[m.MirroredElemTypes]]
+    instance.map(tuple => m.fromTuple(tuple))
+  }
+
+}
diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala
index 4facccc..87b9370 100644
--- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala
+++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala
@@ 
-29,7 +29,7 @@ object ParsingError { /** * Represents an error indicating a non-TSV line. */ - final case object NotTSV extends ParsingError + case object NotTSV extends ParsingError /** * Represents an error indicating the number of actual fields is not equal @@ -105,7 +105,7 @@ object ParsingError { cursor .downField("message") .as[String] - .map(UnhandledRowDecodingError) + .map(UnhandledRowDecodingError(_)) } } yield result } @@ -138,12 +138,12 @@ object ParsingError { cursor .downField("fieldCount") .as[Int] - .map(FieldNumberMismatch) + .map(FieldNumberMismatch(_)) case "RowDecodingError" => cursor .downField("errors") .as[NonEmptyList[RowDecodingErrorInfo]] - .map(RowDecodingError) + .map(RowDecodingError(_)) case _ => DecodingFailure(s"Error type $error is not an Analytics SDK Parsing Error.", cursor.history).asLeft } diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala index ac7df2c..84435d5 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala @@ -14,7 +14,6 @@ package com.snowplowanalytics.snowplow.analytics.scalasdk // circe import io.circe.syntax._ -import io.circe.generic.semiauto._ import io.circe.{Decoder, DecodingFailure, Encoder, Json, JsonObject} import io.circe.CursorOp.DownField @@ -41,7 +40,7 @@ object SnowplowEvent { } implicit final val unstructCirceEncoder: Encoder[UnstructEvent] = - Encoder.instance { unstructEvent: UnstructEvent => + Encoder.instance { unstructEvent => if (unstructEvent.data.isEmpty) Json.Null else JsonObject( @@ -50,7 +49,7 @@ object SnowplowEvent { ).asJson } - implicit val unstructEventDecoder: Decoder[UnstructEvent] = deriveDecoder[UnstructEvent].recover { + implicit val unstructEventDecoder: Decoder[UnstructEvent] = 
Decoder.forProduct1("data")(UnstructEvent.apply).recover { case DecodingFailure(_, DownField("data") :: _) => UnstructEvent(None) } @@ -72,7 +71,7 @@ object SnowplowEvent { } implicit final val contextsCirceEncoder: Encoder[Contexts] = - Encoder.instance { contexts: Contexts => + Encoder.instance { contexts => if (contexts.data.isEmpty) JsonObject.empty.asJson else JsonObject( @@ -81,7 +80,7 @@ object SnowplowEvent { ).asJson } - implicit val contextsDecoder: Decoder[Contexts] = deriveDecoder[Contexts].recover { + implicit val contextsDecoder: Decoder[Contexts] = Decoder.forProduct1("data")(Contexts.apply).recover { case DecodingFailure(_, DownField("data") :: _) => Contexts(List()) } diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala index ffac25a..003025f 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala @@ -10,49 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.analytics.scalasdk.decode +package com.snowplowanalytics.snowplow.analytics.scalasdk +package decode -import shapeless._ -import cats.syntax.validated._ -import cats.syntax.either._ -import cats.syntax.apply._ -import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo.UnhandledRowDecodingError - -/** - * Type class to decode List of keys-value into HList - * Keys derived from original class of HList, - * Values are actual TSV columns - */ -private[scalasdk] trait RowDecoder[L <: HList] extends Serializable { +private[scalasdk] trait RowDecoder[L] extends Serializable { self => def apply(row: List[(Key, String)]): RowDecodeResult[L] -} - -private[scalasdk] object RowDecoder { - import HList.ListCompat._ - - def apply[L <: HList](implicit fromRow: RowDecoder[L]): RowDecoder[L] = fromRow - - def fromFunc[L <: HList](f: List[(Key, String)] => RowDecodeResult[L]): RowDecoder[L] = - new RowDecoder[L] { - def apply(row: List[(Key, String)]) = f(row) + def map[B](f: L => B): RowDecoder[B] = + new RowDecoder[B] { + def apply(row: List[(Key, String)]): RowDecodeResult[B] = self.apply(row).map(f) } - - /** Parse TSV row into HList */ - private def parse[H: ValueDecoder, T <: HList: RowDecoder](row: List[(Key, String)]): RowDecodeResult[H :: T] = - row match { - case h :: t => - val hv: RowDecodeResult[H] = ValueDecoder[H].parse(h).toValidatedNel - val tv: RowDecodeResult[T] = RowDecoder[T].apply(t) - (hv, tv).mapN(_ :: _) - case Nil => UnhandledRowDecodingError("Not enough values, format is invalid").invalidNel - } - - implicit def hnilFromRow: RowDecoder[HNil] = - fromFunc { - case Nil => HNil.validNel - case rows => UnhandledRowDecodingError(s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}").invalidNel - } - - implicit def hconsFromRow[H: ValueDecoder, T <: HList: RowDecoder]: RowDecoder[H :: T] = - fromFunc(row => parse(row)) } + +object RowDecoder extends 
RowDecoderCompanion