From f95704d7a6085a669d376664e17f930ddd4c881e Mon Sep 17 00:00:00 2001
From: Enes Aldemir
Date: Fri, 5 Jul 2019 14:50:46 +0300
Subject: [PATCH 01/12] Make parsing errors type-safe (closes #75)

---
 .../ParsingError.scala            | 149 +++++++++++++
 .../decode/Parser.scala           |  22 +-
 .../decode/RowDecoder.scala       |  18 +-
 .../decode/ValueDecoder.scala     |  36 ++--
 .../decode/package.scala          |  23 +-
 .../EventSpec.scala               | 197 ++++++++++++++++--
 .../ParsingErrorSpec.scala        | 103 +++++++++
 .../decode/ValueDecoderSpec.scala |  23 +-
 8 files changed, 505 insertions(+), 66 deletions(-)
 create mode 100644 src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala
 create mode 100644 src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingErrorSpec.scala

diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala
new file mode 100644
index 0000000..3d0e1f6
--- /dev/null
+++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingError.scala
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved.
+ *
+ * This program is licensed to you under the Apache License Version 2.0,
+ * and you may not use this file except in compliance with the Apache License Version 2.0.
+ * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the Apache License Version 2.0 is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
+ */
+package com.snowplowanalytics.snowplow.analytics.scalasdk
+
+import cats.data.NonEmptyList
+import cats.syntax.either._
+import io.circe._
+import io.circe.syntax._
+import com.snowplowanalytics.snowplow.analytics.scalasdk.decode.Key
+import com.snowplowanalytics.snowplow.analytics.scalasdk.decode.Key._
+
+/**
+ * Represents an error raised when parsing a TSV line.
+ */
+sealed trait ParsingError extends Product with Serializable
+
+object ParsingError {
+
+  /**
+   * Represents an error indicating a non-TSV line.
+   */
+  final case object NotTSV extends ParsingError
+
+  /**
+   * Represents an error indicating the number of actual fields is not equal
+   * to the number of expected fields.
+   * @param fieldCount The number of fields in the TSV line.
+   */
+  final case class FieldNumberMismatch(fieldCount: Int) extends ParsingError
+
+  /**
+   * Represents an error raised when trying to decode the values in a line.
+   * @param errors A non-empty list of errors encountered when trying to decode the values.
+   */
+  final case class RowDecodingError(errors: NonEmptyList[RowDecodingErrorInfo]) extends ParsingError
+
+  /**
+   * Contains information about the reasons behind errors raised when trying to decode the values in a line.
+   */
+  sealed trait RowDecodingErrorInfo extends Product with Serializable
+
+  object RowDecodingErrorInfo {
+    /**
+     * Represents cases where the value in a field is not valid,
+     * e.g. an invalid timestamp, an invalid UUID, etc.
+     * @param key The name of the field.
+     * @param value The value of the field.
+     * @param message The error message.
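+     *
+     * For example (mirroring the expectation in `EventSpec` below), a malformed
+     * `event_id` column would be represented as:
+     * {{{
+     * InvalidValue('event_id, "not_a_uuid", "Cannot parse key 'event_id with value not_a_uuid into UUID")
+     * }}}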
+ */ + final case class InvalidValue(key: Key, value: String, message: String) extends RowDecodingErrorInfo + + /** + * Represents unhandled errors raised when trying to decode a line. + * For example, while parsing a list of tuples to [[HList]] in + * [[RowDecoder]], type checking should make it impossible to get more or less values + * than expected. + * @param message The error message. + */ + final case class UnhandledRowDecodingError(message: String) extends RowDecodingErrorInfo + + implicit val analyticsSdkRowDecodingErrorInfoCirceEncoder: Encoder[RowDecodingErrorInfo] = + Encoder.instance { + case InvalidValue(key, value, message) => + Json.obj( + "type" := "InvalidValue", + "key" := key, + "value" := value, + "message" := message + ) + case UnhandledRowDecodingError(message: String) => + Json.obj( + "type" := "UnhandledRowDecodingError", + "message" := message + ) + } + + implicit val analyticsSdkRowDecodingErrorInfoCirceDecoder: Decoder[RowDecodingErrorInfo] = + Decoder.instance { cursor => + for { + errorType <- cursor.downField("type").as[String] + result <- errorType match { + case "InvalidValue" => + for { + key <- cursor.downField("key").as[Key] + value <- cursor.downField("value").as[String] + message <- cursor.downField("message").as[String] + } yield InvalidValue(key, value, message) + + case "UnhandledRowDecodingError" => + cursor + .downField("message") + .as[String] + .map(UnhandledRowDecodingError) + } + } yield result + } + } + + implicit val analyticsSdkParsingErrorCirceEncoder: Encoder[ParsingError] = + Encoder.instance { + case NotTSV => + Json.obj("type" := "NotTSV") + case FieldNumberMismatch(fieldCount) => + Json.obj( + "type" := "FieldNumberMismatch", + "fieldCount" := fieldCount + ) + case RowDecodingError(errors) => + Json.obj( + "type" := "RowDecodingError", + "errors" := errors.asJson + ) + } + + implicit val analyticsSdkParsingErrorCirceDecoder: Decoder[ParsingError] = + Decoder.instance { cursor => + for { + error <- cursor.downField("type").as[String] + result <- error match { + case "NotTSV" => + NotTSV.asRight + case "FieldNumberMismatch" => + cursor + .downField("fieldCount") + .as[Int] + .map(FieldNumberMismatch) + case "RowDecodingError" => + cursor + .downField("errors") + .as[NonEmptyList[RowDecodingErrorInfo]] + .map(RowDecodingError) + case _ => + DecodingFailure( + s"Error type $error is not an Analytics SDK Parsing Error.", + cursor.history).asLeft + } + } yield result + } +} diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala index 2b88f02..58b610a 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/Parser.scala @@ -15,8 +15,8 @@ package com.snowplowanalytics.snowplow.analytics.scalasdk.decode import shapeless._ import shapeless.ops.record._ import shapeless.ops.hlist._ - -import Parser._ +import cats.data.{NonEmptyList, Validated} +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.{FieldNumberMismatch, NotTSV, RowDecodingError} private[scalasdk] trait Parser[A] extends Serializable { /** Heterogeneous TSV values */ @@ -33,9 +33,16 @@ private[scalasdk] trait Parser[A] extends Serializable { def parse(row: String): DecodeResult[A] = { val values = row.split("\t", -1) - val zipped = knownKeys.zipAll(values, UnknownKeyPlaceholder, ValueIsMissingPlaceholder) - val decoded = 
decoder(zipped) - decoded.map { decodedValue => generic.from(decodedValue) } + if (values.length == 1) { + Validated.Invalid(NotTSV) + } + else if (values.length != knownKeys.length) { + Validated.Invalid(FieldNumberMismatch(values.length)) + } else { + val zipped = knownKeys.zip(values) + val decoded = decoder(zipped).leftMap(e => RowDecodingError(e)) + decoded.map { decodedValue => generic.from(decodedValue) } + } } } @@ -61,11 +68,6 @@ object Parser { } } - /** Key name that will be used if TSV has more columns than a class */ - val UnknownKeyPlaceholder = 'UnknownKey - /** Value that will be used if class has more fields than a TSV */ - val ValueIsMissingPlaceholder = "VALUE IS MISSING" - /** Derive a TSV parser for `A` */ private[scalasdk] def deriveFor[A]: DeriveParser[A] = new DeriveParser[A] {} diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala index 91ddb45..5b1d22b 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/RowDecoder.scala @@ -16,6 +16,7 @@ import shapeless._ import cats.syntax.validated._ import cats.syntax.either._ import cats.syntax.apply._ +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo.UnhandledRowDecodingError /** * Type class to decode List of keys-value pairs into HList @@ -23,7 +24,7 @@ import cats.syntax.apply._ * Values are actual TSV columns */ private[scalasdk] trait RowDecoder[L <: HList] extends Serializable { - def apply(row: List[(Key, String)]): DecodeResult[L] + def apply(row: List[(Key, String)]): RowDecodeResult[L] } private[scalasdk] object RowDecoder { @@ -31,25 +32,24 @@ private[scalasdk] object RowDecoder { def apply[L <: HList](implicit fromRow: RowDecoder[L]): RowDecoder[L] = fromRow - def fromFunc[L <: HList](f: List[(Key, String)] => DecodeResult[L]): RowDecoder[L] = + def fromFunc[L <: HList](f: List[(Key, String)] => RowDecodeResult[L]): RowDecoder[L] = new RowDecoder[L] { def apply(row: List[(Key, String)]) = f(row) } /** Parse TSV row into HList */ - private def parse[H: ValueDecoder, T <: HList: RowDecoder](row: List[(Key, String)]) = + private def parse[H: ValueDecoder, T <: HList: RowDecoder](row: List[(Key, String)]): RowDecodeResult[H :: T] = row match { case h :: t => - val hv: DecodeResult[H] = - ValueDecoder[H].parse(h).leftMap(_._2).toValidatedNel - val tv = RowDecoder[T].apply(t) + val hv: RowDecodeResult[H] = ValueDecoder[H].parse(h).toValidatedNel + val tv: RowDecodeResult[T] = RowDecoder[T].apply(t) (hv, tv).mapN { _ :: _ } - case Nil => "Not enough values, format is invalid".invalidNel + case Nil => UnhandledRowDecodingError("Not enough values, format is invalid").invalidNel } - implicit val hnilFromRow: RowDecoder[HNil] = fromFunc { + implicit def hnilFromRow: RowDecoder[HNil] = fromFunc { case Nil => HNil.validNel - case rows => s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}".invalidNel + case rows => UnhandledRowDecodingError(s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}").invalidNel } implicit def hconsFromRow[H: ValueDecoder, T <: HList: RowDecoder]: RowDecoder[H :: T] = diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoder.scala 
b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoder.scala index 4e05a50..b28f8b7 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoder.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoder.scala @@ -34,6 +34,8 @@ import io.circe.{Error, Json} // This library import com.snowplowanalytics.snowplow.analytics.scalasdk.Common.{ContextsCriterion, UnstructEventCriterion} import com.snowplowanalytics.snowplow.analytics.scalasdk.SnowplowEvent.{Contexts, UnstructEvent} +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo._ private[decode] trait ValueDecoder[A] { def parse(column: (Key, String)): DecodedValue[A] @@ -50,7 +52,7 @@ private[decode] object ValueDecoder { implicit final val stringColumnDecoder: ValueDecoder[String] = fromFunc[String] { case (key, value) => - if (value.isEmpty) (key, s"Field $key cannot be empty").asLeft else value.asRight + if (value.isEmpty) InvalidValue(key, value, s"Field $key cannot be empty").asLeft else value.asRight } implicit final val stringOptionColumnDecoder: ValueDecoder[Option[String]] = @@ -67,7 +69,7 @@ private[decode] object ValueDecoder { value.toInt.some.asRight } catch { case _: NumberFormatException => - (key, s"Cannot parse key $key with value $value into integer").asLeft + InvalidValue(key, value, s"Cannot parse key $key with value $value into integer").asLeft } } @@ -75,13 +77,13 @@ private[decode] object ValueDecoder { fromFunc[UUID] { case (key, value) => if (value.isEmpty) - (key, s"Field $key cannot be empty").asLeft + InvalidValue(key, value, s"Field $key cannot be empty").asLeft else try { - UUID.fromString(value).asRight[(Key, String)] + UUID.fromString(value).asRight[RowDecodingErrorInfo] } catch { case _: IllegalArgumentException => - (key, s"Cannot parse key $key with value $value into UUID").asLeft + InvalidValue(key, value, s"Cannot parse key $key with value $value into UUID").asLeft } } @@ -92,7 +94,7 @@ private[decode] object ValueDecoder { case "0" => false.some.asRight case "1" => true.some.asRight case "" => none[Boolean].asRight - case _ => (key, s"Cannot parse key $key with value $value into boolean").asLeft + case _ => InvalidValue(key, value, s"Cannot parse key $key with value $value into boolean").asLeft } } @@ -105,7 +107,7 @@ private[decode] object ValueDecoder { value.toDouble.some.asRight } catch { case _: NumberFormatException => - (key, s"Cannot parse key $key with value $value into double").asLeft + InvalidValue(key, value, s"Cannot parse key $key with value $value into double").asLeft } } @@ -113,14 +115,14 @@ private[decode] object ValueDecoder { fromFunc[Instant] { case (key, value) => if (value.isEmpty) - (key, s"Field $key cannot be empty").asLeft + InvalidValue(key, value, s"Field $key cannot be empty").asLeft else { val tstamp = reformatTstamp(value) try { Instant.parse(tstamp).asRight } catch { case _: DateTimeParseException => - (key, s"Cannot parse key $key with value $value into datetime").asLeft + InvalidValue(key, value, s"Cannot parse key $key with value $value into datetime").asLeft } } } @@ -129,14 +131,14 @@ private[decode] object ValueDecoder { fromFunc[Option[Instant]] { case (key, value) => if (value.isEmpty) - none[Instant].asRight[(Key, String)] + none[Instant].asRight[RowDecodingErrorInfo] else { val tstamp = reformatTstamp(value) try { 
Instant.parse(tstamp).some.asRight } catch { case _: DateTimeParseException => - (key, s"Cannot parse key $key with value $value into datetime").asLeft + InvalidValue(key, value, s"Cannot parse key $key with value $value into datetime").asLeft } } } @@ -144,9 +146,9 @@ private[decode] object ValueDecoder { implicit final val unstructuredJson: ValueDecoder[UnstructEvent] = fromFunc[UnstructEvent] { case (key, value) => - def asLeft(error: Error): (Key, String) = (key, error.show) + def asLeft(error: Error): RowDecodingErrorInfo = InvalidValue(key, value, error.show) if (value.isEmpty) - UnstructEvent(None).asRight[(Key, String)] + UnstructEvent(None).asRight[RowDecodingErrorInfo] else parseJson(value) .flatMap(_.as[SelfDescribingData[Json]]) @@ -154,7 +156,7 @@ private[decode] object ValueDecoder { case Right(SelfDescribingData(schema, data)) if UnstructEventCriterion.matches(schema) => data.as[SelfDescribingData[Json]].leftMap(asLeft).map(_.some).map(UnstructEvent.apply) case Right(SelfDescribingData(schema, _)) => - (key, s"Unknown payload: ${schema.toSchemaUri}").asLeft[UnstructEvent] + InvalidValue(key, value, s"Unknown payload: ${schema.toSchemaUri}").asLeft[UnstructEvent] case Left(error) => error.asLeft[UnstructEvent] } } @@ -162,9 +164,9 @@ private[decode] object ValueDecoder { implicit final val contexts: ValueDecoder[Contexts] = fromFunc[Contexts] { case (key, value) => - def asLeft(error: Error): (Key, String) = (key, error.show) + def asLeft(error: Error): RowDecodingErrorInfo = InvalidValue(key, value, error.show) if (value.isEmpty) - Contexts(List()).asRight[(Key, String)] + Contexts(List()).asRight[RowDecodingErrorInfo] else parseJson(value) .flatMap(_.as[SelfDescribingData[Json]]) @@ -172,7 +174,7 @@ private[decode] object ValueDecoder { case Right(SelfDescribingData(schema, data)) if ContextsCriterion.matches(schema) => data.as[List[SelfDescribingData[Json]]].leftMap(asLeft).map(Contexts.apply) case Right(SelfDescribingData(schema, _)) => - (key, s"Unknown payload: ${schema.toSchemaUri}").asLeft[Contexts] + InvalidValue(key, value, s"Unknown payload: ${schema.toSchemaUri}").asLeft[Contexts] case Left(error) => error.asLeft[Contexts] } } diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/package.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/package.scala index 7027446..339eed6 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/package.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/package.scala @@ -12,15 +12,30 @@ */ package com.snowplowanalytics.snowplow.analytics.scalasdk -import cats.data.ValidatedNel +import cats.data.{Validated, ValidatedNel} +import cats.syntax.either._ +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo +import io.circe.{Decoder, Encoder} +import io.circe.syntax._ package object decode { /** Expected name of the field */ type Key = Symbol + object Key { + implicit val analyticsSdkKeyCirceEncoder: Encoder[Key] = + Encoder.instance(_.toString.stripPrefix("'").asJson) + + implicit val analyticsSdkKeyCirceDecoder: Decoder[Key] = + Decoder.instance(_.as[String].map(Symbol(_))) + } + /** Result of single-value parsing */ - type DecodedValue[A] = Either[(Key, String), A] + type DecodedValue[A] = Either[RowDecodingErrorInfo, A] + + /** Result of row decode process */ + type RowDecodeResult[A] = ValidatedNel[RowDecodingErrorInfo, A] - /** Result of TSV line parsing, which is either an 
event or non empty list of parse errors */ - type DecodeResult[A] = ValidatedNel[String, A] + /** Result of TSV line parsing, which is either an event or parse error */ + type DecodeResult[A] = Validated[ParsingError, A] } diff --git a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala index 9fc4b0a..8d95f1c 100644 --- a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala +++ b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala @@ -20,6 +20,7 @@ import java.util.UUID // cats import cats.data.Validated.{Invalid, Valid} import cats.data.NonEmptyList +import cats.syntax.either._ // circe import io.circe.{Json, JsonObject} @@ -34,6 +35,8 @@ import com.snowplowanalytics.iglu.core.{SchemaKey, SchemaVer, SelfDescribingData // This library import com.snowplowanalytics.snowplow.analytics.scalasdk.SnowplowEvent._ +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError._ +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo._ /** * Tests Event case class @@ -713,7 +716,7 @@ class EventSpec extends Specification { "event_version": "1-0-0", "event_fingerprint": "e3dbfa9cca0412c3d4052863cefb547f", "true_tstamp": "2013-11-26T00:03:57.886Z" - }""").getOrElse(throw new RuntimeException("Failed to parse expected JSON")) + }""").right.getOrElse(throw new RuntimeException("Failed to parse expected JSON")) // JSON output must be equal to output from the old transformer. (NB: field ordering in new JSON will be randomized) eventJson mustEqual expectedJson @@ -1127,7 +1130,7 @@ class EventSpec extends Specification { "event_version": "1-0-0", "event_fingerprint": "e3dbfa9cca0412c3d4052863cefb547f", "true_tstamp": "2013-11-26T00:03:57.886Z" - }""").getOrElse(throw new RuntimeException("Failed to parse expected JSON")) + }""").right.getOrElse(throw new RuntimeException("Failed to parse expected JSON")) // JSON output must be equal to output from the old transformer. (NB: field ordering in new JSON will be randomized) eventJson mustEqual expectedJson @@ -1658,7 +1661,7 @@ class EventSpec extends Specification { "event_version": "1-0-0", "event_fingerprint": "e3dbfa9cca0412c3d4052863cefb547f", "true_tstamp": "2013-11-26T00:03:57.886Z" - }""").getOrElse(throw new RuntimeException("Failed to parse expected JSON")) + }""").right.getOrElse(throw new RuntimeException("Failed to parse expected JSON")) // JSON output must be equal to output from the old transformer. 
(NB: field ordering in new JSON will be randomized) eventJson mustEqual expectedJson @@ -1854,7 +1857,7 @@ class EventSpec extends Specification { ) } - "fail if column values are invalid (and combine errors)" in { + "fail (and combine errors) if values are invalid" in { val input = List( "app_id" -> "angry-birds", @@ -1986,23 +1989,187 @@ class EventSpec extends Specification { "event_name" -> "link_click", "event_format" -> "jsonschema", "event_version" -> "1-0-0", - "event_fingerprint" -> "e3dbfa9cca0412c3d4052863cefb547f" + "event_fingerprint" -> "e3dbfa9cca0412c3d4052863cefb547f", + "true_tstamp" -> "2013-11-26 00:03:57.886" ) val eventValues = input.unzip._2.mkString("\t") val event = Event.parse(eventValues) // Case class must be correctly invalidated - event mustEqual Invalid(NonEmptyList.of( - "Cannot parse key 'etl_tstamp with value not_an_instant into datetime", - "Field 'collector_tstamp cannot be empty", - "Cannot parse key 'event_id with value not_a_uuid into UUID", - "Cannot parse key 'txn_id with value not_an_integer into integer", - "Field 'v_collector cannot be empty", - "Cannot parse key 'geo_latitude with value not_a_double into double", - "Cannot parse key 'br_features_pdf with value not_a_boolean into boolean", - "Cannot parse key 'true_tstamp with value VALUE IS MISSING into datetime" - )) + val res = RowDecodingError( + NonEmptyList.of( + InvalidValue(Symbol("etl_tstamp"), "not_an_instant", "Cannot parse key 'etl_tstamp with value not_an_instant into datetime"), + InvalidValue(Symbol("collector_tstamp"), "", "Field 'collector_tstamp cannot be empty"), + InvalidValue(Symbol("event_id"), "not_a_uuid", "Cannot parse key 'event_id with value not_a_uuid into UUID"), + InvalidValue(Symbol("txn_id"), "not_an_integer", "Cannot parse key 'txn_id with value not_an_integer into integer"), + InvalidValue(Symbol("v_collector"), "", "Field 'v_collector cannot be empty"), + InvalidValue(Symbol("geo_latitude"), "not_a_double", "Cannot parse key 'geo_latitude with value not_a_double into double"), + InvalidValue(Symbol("br_features_pdf"), "not_a_boolean", "Cannot parse key 'br_features_pdf with value not_a_boolean into boolean") + ) + ) + event mustEqual Invalid(res) + } + + "fail if payload is not TSV" in { + val event = Event.parse("non tsv") + event mustEqual Invalid(NotTSV) + } + + "fail if there are more fields than expected" in { + val input = List( + "app_id" -> "angry-birds", + "platform" -> "web", + "etl_tstamp" -> "not_an_instant", + "collector_tstamp" -> "", + "dvce_created_tstamp" -> "2013-11-26 00:03:57.885", + "event" -> "page_view", + "event_id" -> "not_a_uuid", + "txn_id" -> "not_an_integer", + "name_tracker" -> "cloudfront-1", + "v_tracker" -> "js-2.1.0", + "v_collector" -> "", + "v_etl" -> "serde-0.5.2", + "user_id" -> "jon.doe@email.com", + "user_ipaddress" -> "92.231.54.234", + "user_fingerprint" -> "2161814971", + "domain_userid" -> "bc2e92ec6c204a14", + "domain_sessionidx" -> "3", + "network_userid" -> "ecdff4d0-9175-40ac-a8bb-325c49733607", + "geo_country" -> "US", + "geo_region" -> "TX", + "geo_city" -> "New York", + "geo_zipcode" -> "94109", + "geo_latitude" -> "not_a_double", + "geo_longitude" -> "-122.4124", + "geo_region_name" -> "Florida", + "ip_isp" -> "FDN Communications", + "ip_organization" -> "Bouygues Telecom", + "ip_domain" -> "nuvox.net", + "ip_netspeed" -> "Cable/DSL", + "page_url" -> "http://www.snowplowanalytics.com", + "page_title" -> "On Analytics", + "page_referrer" -> "", + "page_urlscheme" -> "http", + "page_urlhost" -> 
"www.snowplowanalytics.com", + "page_urlport" -> "80", + "page_urlpath" -> "/product/index.html", + "page_urlquery" -> "id=GTM-DLRG", + "page_urlfragment" -> "4-conclusion", + "refr_urlscheme" -> "", + "refr_urlhost" -> "", + "refr_urlport" -> "", + "refr_urlpath" -> "", + "refr_urlquery" -> "", + "refr_urlfragment" -> "", + "refr_medium" -> "", + "refr_source" -> "", + "refr_term" -> "", + "mkt_medium" -> "", + "mkt_source" -> "", + "mkt_term" -> "", + "mkt_content" -> "", + "mkt_campaign" -> "", + "contexts" -> contextsJson, + "se_category" -> "", + "se_action" -> "", + "se_label" -> "", + "se_property" -> "", + "se_value" -> "", + "unstruct_event" -> unstructJson, + "tr_orderid" -> "", + "tr_affiliation" -> "", + "tr_total" -> "", + "tr_tax" -> "", + "tr_shipping" -> "", + "tr_city" -> "", + "tr_state" -> "", + "tr_country" -> "", + "ti_orderid" -> "", + "ti_sku" -> "", + "ti_name" -> "", + "ti_category" -> "", + "ti_price" -> "", + "ti_quantity" -> "", + "pp_xoffset_min" -> "", + "pp_xoffset_max" -> "", + "pp_yoffset_min" -> "", + "pp_yoffset_max" -> "", + "useragent" -> "", + "br_name" -> "", + "br_family" -> "", + "br_version" -> "", + "br_type" -> "", + "br_renderengine" -> "", + "br_lang" -> "", + "br_features_pdf" -> "not_a_boolean", + "br_features_flash" -> "0", + "br_features_java" -> "", + "br_features_director" -> "", + "br_features_quicktime" -> "", + "br_features_realplayer" -> "", + "br_features_windowsmedia" -> "", + "br_features_gears" -> "", + "br_features_silverlight" -> "", + "br_cookies" -> "", + "br_colordepth" -> "", + "br_viewwidth" -> "", + "br_viewheight" -> "", + "os_name" -> "", + "os_family" -> "", + "os_manufacturer" -> "", + "os_timezone" -> "", + "dvce_type" -> "", + "dvce_ismobile" -> "", + "dvce_screenwidth" -> "", + "dvce_screenheight" -> "", + "doc_charset" -> "", + "doc_width" -> "", + "doc_height" -> "", + "tr_currency" -> "", + "tr_total_base" -> "", + "tr_tax_base" -> "", + "tr_shipping_base" -> "", + "ti_currency" -> "", + "ti_price_base" -> "", + "base_currency" -> "", + "geo_timezone" -> "", + "mkt_clickid" -> "", + "mkt_network" -> "", + "etl_tags" -> "", + "dvce_sent_tstamp" -> "", + "refr_domain_userid" -> "", + "refr_dvce_tstamp" -> "", + "derived_contexts" -> derivedContextsJson, + "domain_sessionid" -> "2b15e5c8-d3b1-11e4-b9d6-1681e6b88ec1", + "derived_tstamp" -> "2013-11-26 00:03:57.886", + "event_vendor" -> "com.snowplowanalytics.snowplow", + "event_name" -> "link_click", + "event_format" -> "jsonschema", + "event_version" -> "1-0-0", + "event_fingerprint" -> "e3dbfa9cca0412c3d4052863cefb547f", + "true_tstamp" -> "2013-11-26 00:03:57.886", + "additional_field" -> "mock_value" + ) + + val eventValues = input.unzip._2.mkString("\t") + val event = Event.parse(eventValues) + + event mustEqual Invalid(FieldNumberMismatch(132)) + } + + "fail if there are fewer fields than expected" in { + val input = List( + "app_id" -> "angry-birds", + "platform" -> "web", + "etl_tstamp" -> "not_an_instant", + "collector_tstamp" -> "" + ) + + val eventValues = input.unzip._2.mkString("\t") + val event = Event.parse(eventValues) + + event mustEqual Invalid(FieldNumberMismatch(4)) } "successfully decode encoded event which has no contexts or unstruct_event" in { diff --git a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingErrorSpec.scala b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingErrorSpec.scala new file mode 100644 index 0000000..e47d11b --- /dev/null +++ 
b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/ParsingErrorSpec.scala @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved. + * + * This program is licensed to you under the Apache License Version 2.0, + * and you may not use this file except in compliance with the Apache License Version 2.0. + * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the Apache License Version 2.0 is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. + */ +package com.snowplowanalytics.snowplow.analytics.scalasdk + +import cats.data.NonEmptyList + +import io.circe.{Decoder, Json} +import io.circe.syntax._ +import io.circe.parser._ + +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError._ +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo._ +import org.specs2.Specification + +class ParsingErrorSpec extends Specification { def is = s2""" + ParsingError encoder-decoder + works correctly with NotTSV error $e1 + works correctly with FieldNumberMismatch error $e2 + works correctly with RowDecodingError $e3 + """ + + def e1 = { + val errorJson = parseJson( + """ + |{ + | "type": "NotTSV" + |} + """.stripMargin + ) + + val decoded = decodeJson[ParsingError](errorJson) + val encoded = decoded.asJson + + (decoded must beEqualTo(NotTSV)) and (encoded must beEqualTo(errorJson)) + } + + def e2 = { + val errorJson = parseJson( + """ + |{ + | "type": "FieldNumberMismatch", + | "fieldCount": 120 + |} + """.stripMargin + ) + + val decoded = decodeJson[ParsingError](errorJson) + val encoded = decoded.asJson + + (decoded must beEqualTo(FieldNumberMismatch(120))) and (encoded must beEqualTo(errorJson)) + } + + def e3 = { + val errorJson = parseJson( + """ + |{ + | "type": "RowDecodingError", + | "errors": [ + | { + | "type": "InvalidValue", + | "key": "exampleKey", + | "value": "exampleValue", + | "message": "exampleMessage" + | }, + | { + | "type": "UnhandledRowDecodingError", + | "message": "exampleError" + | } + | ] + |} + """.stripMargin + ) + + val decoded = decodeJson[ParsingError](errorJson) + val encoded = decoded.asJson + + val expected = RowDecodingError( + NonEmptyList.of( + InvalidValue(Symbol("exampleKey"), "exampleValue", "exampleMessage"), + UnhandledRowDecodingError("exampleError") + ) + ) + + (decoded must beEqualTo(expected)) and (encoded must beEqualTo(errorJson)) + } + + private def parseJson(jsonStr: String): Json = + parse(jsonStr).right.getOrElse(throw new RuntimeException("Failed to parse expected JSON.")) + + private def decodeJson[A: Decoder](json: Json): A = { + json.as[A].right.getOrElse(throw new RuntimeException("Failed to decode to ParsingError.")) + } +} diff --git a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoderSpec.scala b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoderSpec.scala index d62377a..b7d602b 100644 --- a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoderSpec.scala +++ b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/decode/ValueDecoderSpec.scala @@ -33,6 +33,7 @@ import com.snowplowanalytics.iglu.core.{SchemaKey, SchemaVer, 
SelfDescribingData // This library import com.snowplowanalytics.snowplow.analytics.scalasdk.SnowplowEvent.{Contexts, UnstructEvent} +import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.RowDecodingErrorInfo._ /** * Tests ValueDecoder class @@ -41,7 +42,7 @@ class ValueDecoderSpec extends Specification { "The ValueDecoder class" should { "parse String and Option[String] values" in { - ValueDecoder[String].parse(Symbol("key"), "") mustEqual (Symbol("key"), "Field 'key cannot be empty").asLeft + ValueDecoder[String].parse(Symbol("key"), "") mustEqual InvalidValue(Symbol("key"), "", "Field 'key cannot be empty").asLeft ValueDecoder[String].parse(Symbol("key"), "value") mustEqual "value".asRight ValueDecoder[Option[String]].parse(Symbol("key"), "") mustEqual None.asRight ValueDecoder[Option[String]].parse(Symbol("key"), "value") mustEqual Some("value").asRight @@ -50,35 +51,35 @@ class ValueDecoderSpec extends Specification { "parse Option[Int] values" in { ValueDecoder[Option[Int]].parse(Symbol("key"), "") mustEqual None.asRight ValueDecoder[Option[Int]].parse(Symbol("key"), "42") mustEqual Some(42).asRight - ValueDecoder[Option[Int]].parse(Symbol("key"), "value") mustEqual (Symbol("key"), "Cannot parse key 'key with value value into integer").asLeft + ValueDecoder[Option[Int]].parse(Symbol("key"), "value") mustEqual InvalidValue(Symbol("key"), "value", "Cannot parse key 'key with value value into integer").asLeft } "parse UUID values" in { - ValueDecoder[UUID].parse(Symbol("key"), "") mustEqual (Symbol("key"), "Field 'key cannot be empty").asLeft + ValueDecoder[UUID].parse(Symbol("key"), "") mustEqual InvalidValue(Symbol("key"), "", "Field 'key cannot be empty").asLeft ValueDecoder[UUID].parse(Symbol("key"), "d2161fd1-ffed-41df-ac3e-a729012105f5") mustEqual UUID.fromString("d2161fd1-ffed-41df-ac3e-a729012105f5").asRight - ValueDecoder[UUID].parse(Symbol("key"), "value") mustEqual (Symbol("key"), "Cannot parse key 'key with value value into UUID").asLeft + ValueDecoder[UUID].parse(Symbol("key"), "value") mustEqual InvalidValue(Symbol("key"), "value" ,"Cannot parse key 'key with value value into UUID").asLeft } "parse Option[Boolean] values" in { ValueDecoder[Option[Boolean]].parse(Symbol("key"), "") mustEqual None.asRight ValueDecoder[Option[Boolean]].parse(Symbol("key"), "0") mustEqual Some(false).asRight ValueDecoder[Option[Boolean]].parse(Symbol("key"), "1") mustEqual Some(true).asRight - ValueDecoder[Option[Boolean]].parse(Symbol("key"), "value") mustEqual (Symbol("key"), "Cannot parse key 'key with value value into boolean").asLeft + ValueDecoder[Option[Boolean]].parse(Symbol("key"), "value") mustEqual InvalidValue(Symbol("key"), "value", "Cannot parse key 'key with value value into boolean").asLeft } "parse Option[Double] values" in { ValueDecoder[Option[Double]].parse(Symbol("key"), "") mustEqual None.asRight ValueDecoder[Option[Double]].parse(Symbol("key"), "42.5") mustEqual Some(42.5).asRight - ValueDecoder[Option[Double]].parse(Symbol("key"), "value") mustEqual (Symbol("key"), "Cannot parse key 'key with value value into double").asLeft + ValueDecoder[Option[Double]].parse(Symbol("key"), "value") mustEqual InvalidValue(Symbol("key"), "value", "Cannot parse key 'key with value value into double").asLeft } "parse Instant and Option[Instant] values" in { - ValueDecoder[Instant].parse(Symbol("key"), "") mustEqual (Symbol("key"), "Field 'key cannot be empty").asLeft + ValueDecoder[Instant].parse(Symbol("key"), "") mustEqual InvalidValue(Symbol("key"), "", "Field 'key 
cannot be empty").asLeft ValueDecoder[Instant].parse(Symbol("key"), "2013-11-26 00:03:57.885") mustEqual Instant.parse("2013-11-26T00:03:57.885Z").asRight - ValueDecoder[Instant].parse(Symbol("key"), "value") mustEqual (Symbol("key"), "Cannot parse key 'key with value value into datetime").asLeft + ValueDecoder[Instant].parse(Symbol("key"), "value") mustEqual InvalidValue(Symbol("key"), "value", "Cannot parse key 'key with value value into datetime").asLeft ValueDecoder[Option[Instant]].parse(Symbol("key"), "") mustEqual None.asRight ValueDecoder[Option[Instant]].parse(Symbol("key"), "2013-11-26 00:03:57.885") mustEqual Some(Instant.parse("2013-11-26T00:03:57.885Z")).asRight - ValueDecoder[Option[Instant]].parse(Symbol("key"), "value") mustEqual (Symbol("key"), "Cannot parse key 'key with value value into datetime").asLeft + ValueDecoder[Option[Instant]].parse(Symbol("key"), "value") mustEqual InvalidValue(Symbol("key"), "value", "Cannot parse key 'key with value value into datetime").asLeft } "parse Contexts values" in { @@ -153,7 +154,7 @@ class ValueDecoderSpec extends Specification { ) ) ).asRight - ValueDecoder[Contexts].parse(Symbol("key"), invalidPayloadContexts) mustEqual (Symbol("key"), "Unknown payload: iglu:invalid/schema/jsonschema/1-0-0").asLeft + ValueDecoder[Contexts].parse(Symbol("key"), invalidPayloadContexts) mustEqual InvalidValue(Symbol("key"), invalidPayloadContexts, "Unknown payload: iglu:invalid/schema/jsonschema/1-0-0").asLeft } "parse UnstructEvent values" in { @@ -199,7 +200,7 @@ class ValueDecoderSpec extends Specification { ) ) ).asRight - ValueDecoder[UnstructEvent].parse(Symbol("key"), invalidPayloadUnstruct) mustEqual (Symbol("key"), "Unknown payload: iglu:invalid/schema/jsonschema/1-0-0").asLeft + ValueDecoder[UnstructEvent].parse(Symbol("key"), invalidPayloadUnstruct) mustEqual InvalidValue(Symbol("key"), invalidPayloadUnstruct, "Unknown payload: iglu:invalid/schema/jsonschema/1-0-0").asLeft } } } From 9268c7dff742f222cd17905f6e189c4c0a97eccb Mon Sep 17 00:00:00 2001 From: Enes Aldemir Date: Sun, 7 Jul 2019 14:16:27 +0300 Subject: [PATCH 02/12] Add function to create minimal event (#81) --- .../Event.scala | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/Event.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/Event.scala index b43b567..a9f722e 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/Event.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/Event.scala @@ -256,4 +256,18 @@ object Event { */ def parse(line: String): DecodeResult[Event] = parser.parse(line) -} + + /** + * Creates an event with only required fields. + * All optional fields are set to [[None]]. 
+ */ + def minimal(id: UUID, collectorTstamp: Instant, vCollector: String, vEtl: String): Event = + Event(None, None, None, collectorTstamp, None, None, id, None, None, None, vCollector, vEtl, None, None, None, + None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, + Contexts(Nil), None, None, None, None, None, UnstructEvent(None), None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, + Contexts(Nil), None, None, None, None, None, None, None, None) +} \ No newline at end of file From b90e8190cccd379e47d318067220a8d97a8e5380 Mon Sep 17 00:00:00 2001 From: Enes Aldemir Date: Sun, 7 Jul 2019 14:39:16 +0300 Subject: [PATCH 03/12] Remove Vagrant setup (closes #84) --- Vagrantfile | 19 ---------------- vagrant/.gitignore | 3 --- vagrant/ansible.hosts | 2 -- vagrant/peru.yaml | 14 ------------ vagrant/up.bash | 50 ------------------------------------------- vagrant/up.guidance | 4 ---- vagrant/up.playbooks | 3 --- 7 files changed, 95 deletions(-) delete mode 100644 Vagrantfile delete mode 100644 vagrant/.gitignore delete mode 100644 vagrant/ansible.hosts delete mode 100644 vagrant/peru.yaml delete mode 100755 vagrant/up.bash delete mode 100644 vagrant/up.guidance delete mode 100644 vagrant/up.playbooks diff --git a/Vagrantfile b/Vagrantfile deleted file mode 100644 index 2285f66..0000000 --- a/Vagrantfile +++ /dev/null @@ -1,19 +0,0 @@ -Vagrant.configure("2") do |config| - - config.vm.box = "ubuntu/trusty64" - config.vm.hostname = "snowplow-scala-analytics-sdk" - config.ssh.forward_agent = true - - config.vm.provider :virtualbox do |vb| - vb.name = Dir.pwd().split("/")[-1] + "-" + Time.now.to_f.to_i.to_s - vb.customize ["modifyvm", :id, "--natdnshostresolver1", "on"] - vb.customize [ "guestproperty", "set", :id, "--timesync-threshold", 10000 ] - # Scala is memory-hungry - vb.memory = 5120 - end - - config.vm.provision :shell do |sh| - sh.path = "vagrant/up.bash" - end - -end diff --git a/vagrant/.gitignore b/vagrant/.gitignore deleted file mode 100644 index 1b4b29f..0000000 --- a/vagrant/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.peru -oss-playbooks -ansible diff --git a/vagrant/ansible.hosts b/vagrant/ansible.hosts deleted file mode 100644 index 588fa08..0000000 --- a/vagrant/ansible.hosts +++ /dev/null @@ -1,2 +0,0 @@ -[vagrant] -127.0.0.1:2222 diff --git a/vagrant/peru.yaml b/vagrant/peru.yaml deleted file mode 100644 index e7fdf41..0000000 --- a/vagrant/peru.yaml +++ /dev/null @@ -1,14 +0,0 @@ -imports: - ansible: ansible - ansible_playbooks: oss-playbooks - -curl module ansible: - # Equivalent of git cloning tags/v1.6.6 but much, much faster - url: https://codeload.github.com/ansible/ansible/zip/69d85c22c7475ccf8169b6ec9dee3ee28c92a314 - unpack: zip - export: ansible-69d85c22c7475ccf8169b6ec9dee3ee28c92a314 - -git module ansible_playbooks: - url: https://github.com/snowplow/ansible-playbooks.git - # Comment out to fetch a specific rev instead of master: - # rev: xxx diff --git a/vagrant/up.bash b/vagrant/up.bash deleted file mode 100755 index 7450ae8..0000000 --- a/vagrant/up.bash +++ /dev/null @@ 
-1,50 +0,0 @@ -#!/bin/bash -set -e - -vagrant_dir=/vagrant/vagrant -bashrc=/home/vagrant/.bashrc - -echo "========================================" -echo "INSTALLING PERU AND ANSIBLE DEPENDENCIES" -echo "----------------------------------------" -apt-get update -apt-get install -y language-pack-en git unzip libyaml-dev python3-pip python-yaml python-paramiko python-jinja2 - -echo "===============" -echo "INSTALLING PERU" -echo "---------------" -sudo pip3 install peru - -echo "=======================================" -echo "CLONING ANSIBLE AND PLAYBOOKS WITH PERU" -echo "---------------------------------------" -cd ${vagrant_dir} && peru sync -v -echo "... done" - -env_setup=${vagrant_dir}/ansible/hacking/env-setup -hosts=${vagrant_dir}/ansible.hosts - -echo "===================" -echo "CONFIGURING ANSIBLE" -echo "-------------------" -touch ${bashrc} -echo "source ${env_setup}" >> ${bashrc} -echo "export ANSIBLE_HOSTS=${hosts}" >> ${bashrc} -echo "... done" - -echo "==========================================" -echo "RUNNING PLAYBOOKS WITH ANSIBLE*" -echo "* no output while each playbook is running" -echo "------------------------------------------" -while read pb; do - su - -c "source ${env_setup} && ${vagrant_dir}/ansible/bin/ansible-playbook ${vagrant_dir}/${pb} --connection=local --inventory-file=${hosts}" vagrant -done <${vagrant_dir}/up.playbooks - -guidance=${vagrant_dir}/up.guidance - -if [ -f ${guidance} ]; then - echo "===========" - echo "PLEASE READ" - echo "-----------" - cat $guidance -fi diff --git a/vagrant/up.guidance b/vagrant/up.guidance deleted file mode 100644 index 124cdc0..0000000 --- a/vagrant/up.guidance +++ /dev/null @@ -1,4 +0,0 @@ -To get started: -vagrant ssh -cd /vagrant -sbt test diff --git a/vagrant/up.playbooks b/vagrant/up.playbooks deleted file mode 100644 index 5a7dbc3..0000000 --- a/vagrant/up.playbooks +++ /dev/null @@ -1,3 +0,0 @@ -oss-playbooks/java7.yml -oss-playbooks/scala.yml -oss-playbooks/sbt.yml From d7ba79ce633c172a311d2bd7adcf88b6d154e218 Mon Sep 17 00:00:00 2001 From: Enes Aldemir Date: Tue, 13 Aug 2019 13:04:28 +0300 Subject: [PATCH 04/12] Extend copyright notice to 2019 (closes #85) --- README.md | 2 +- project/BuildSettings.scala | 2 +- .../RunManifests.scala | 2 +- .../RunManifestsSpec.scala | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index a62a3fd..2ea2a1d 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Use this SDK with **[Apache Spark][spark]**, **[AWS Lambda][lambda]**, **[Apache ## Copyright and license -The Snowplow Scala Analytics SDK is copyright 2016-2017 Snowplow Analytics Ltd. +The Snowplow Scala Analytics SDK is copyright 2016-2019 Snowplow Analytics Ltd. Licensed under the **[Apache License, Version 2.0][license]** (the "License"); you may not use this software except in compliance with the License. diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala index 05a369e..8b41b62 100644 --- a/project/BuildSettings.scala +++ b/project/BuildSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2018 Snowplow Analytics Ltd. All rights reserved. + * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. 
diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala index 55a49a9..09112e3 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2018 Snowplow Analytics Ltd. All rights reserved. + * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. diff --git a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifestsSpec.scala b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifestsSpec.scala index b16a676..22d492d 100644 --- a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifestsSpec.scala +++ b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifestsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016-2018 Snowplow Analytics Ltd. All rights reserved. + * Copyright (c) 2016-2019 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. From 7dd06c6df68d30f3e62699cdf32c23261275abf2 Mon Sep 17 00:00:00 2001 From: Enes Aldemir Date: Sat, 17 Aug 2019 23:30:10 +0300 Subject: [PATCH 05/12] Deprecate run manifest (closes #86) --- .../RunManifests.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala index 09112e3..a43f80b 100644 --- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala +++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/RunManifests.scala @@ -32,6 +32,7 @@ import com.amazonaws.services.s3.model.{ ListObjectsV2Request, ListObjectsV2Resu * @param dynamodb AWS DynamoDB client * @param tableName existing DynamoDB table name with run manifests */ +@deprecated("In favor of https://github.com/snowplow-incubator/snowplow-processing-manifest/", "snowplow-scala-analytics-sdk 1.0.0") class RunManifests(dynamodb: AmazonDynamoDB, tableName: String) { /** * Creates DynamoDB table with all necessary settings @@ -62,6 +63,7 @@ class RunManifests(dynamodb: AmazonDynamoDB, tableName: String) { /** * Module with primary run-manifests functions, without applied client */ +@deprecated("In favor of https://github.com/snowplow-incubator/snowplow-processing-manifest/", "snowplow-scala-analytics-sdk 1.0.0") object RunManifests { /** From 505faa779a93de722710d53e7723bd49c6ca0417 Mon Sep 17 00:00:00 2001 From: Enes Aldemir Date: Wed, 14 Aug 2019 23:02:48 +0300 Subject: [PATCH 06/12] Integrate MiMa (closes #87) --- build.sbt | 1 + project/BuildSettings.scala | 19 +++++++++++++++++++ project/plugins.sbt | 1 + 3 files changed, 21 insertions(+) diff --git a/build.sbt b/build.sbt index 859403d..7ad8cc6 100644 --- a/build.sbt +++ b/build.sbt @@ -22,6 +22,7 @@ lazy val root = project.in(file(".")) )) .settings(BuildSettings.buildSettings) .settings(BuildSettings.publishSettings) + .settings(BuildSettings.mimaSettings) .settings(Seq( shellPrompt := { _ => name.value + " > " } )) diff --git 
a/project/BuildSettings.scala b/project/BuildSettings.scala index 8b41b62..f87726c 100644 --- a/project/BuildSettings.scala +++ b/project/BuildSettings.scala @@ -19,6 +19,10 @@ import Keys._ import bintray.BintrayPlugin._ import bintray.BintrayKeys._ +// Mima plugin +import com.typesafe.tools.mima.plugin.MimaKeys._ +import com.typesafe.tools.mima.plugin.MimaPlugin + object BuildSettings { // Basic settings for our app @@ -59,4 +63,19 @@ object BuildSettings { ) ) + + // If new version introduces breaking changes, + // clear-out mimaBinaryIssueFilters and mimaPreviousVersions. + // Otherwise, add previous version to set without + // removing other versions. + val mimaPreviousVersions = Set() + + val mimaSettings = MimaPlugin.mimaDefaultSettings ++ Seq( + mimaPreviousArtifacts := mimaPreviousVersions.map { organization.value %% name.value % _ }, + mimaBinaryIssueFilters ++= Seq(), + test in Test := { + mimaReportBinaryIssues.value + (test in Test).value + } + ) } diff --git a/project/plugins.sbt b/project/plugins.sbt index 6972373..5f167a6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1 +1,2 @@ addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.3") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.5.0") From 89282efa6e5c202c3370dd54d1a4be28d7e48e43 Mon Sep 17 00:00:00 2001 From: Enes Aldemir Date: Thu, 15 Aug 2019 15:28:31 +0300 Subject: [PATCH 07/12] Integrate scoverage (closes #90) --- .travis.yml | 2 +- build.sbt | 1 + project/BuildSettings.scala | 12 ++++++++++++ project/plugins.sbt | 1 + 4 files changed, 15 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 38fe6f1..0f0a3db 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ scala: jdk: - oraclejdk8 script: - - sbt test + - sbt coverage test deploy: skip_cleanup: true provider: script diff --git a/build.sbt b/build.sbt index 7ad8cc6..f2eedc7 100644 --- a/build.sbt +++ b/build.sbt @@ -23,6 +23,7 @@ lazy val root = project.in(file(".")) .settings(BuildSettings.buildSettings) .settings(BuildSettings.publishSettings) .settings(BuildSettings.mimaSettings) + .settings(BuildSettings.scoverageSettings) .settings(Seq( shellPrompt := { _ => name.value + " > " } )) diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala index f87726c..1555638 100644 --- a/project/BuildSettings.scala +++ b/project/BuildSettings.scala @@ -23,6 +23,9 @@ import bintray.BintrayKeys._ import com.typesafe.tools.mima.plugin.MimaKeys._ import com.typesafe.tools.mima.plugin.MimaPlugin +// Scoverage plugin +import scoverage.ScoverageKeys._ + object BuildSettings { // Basic settings for our app @@ -78,4 +81,13 @@ object BuildSettings { (test in Test).value } ) + + val scoverageSettings = Seq( + coverageMinimum := 50, + coverageFailOnMinimum := true, + coverageHighlighting := false, + (test in Test) := { + (coverageReport dependsOn (test in Test)).value + } + ) } diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f167a6..5888fe2 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,2 +1,3 @@ addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.3") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.5.0") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0") \ No newline at end of file From d08add82c2af5479e022f196ab4ceb230c468479 Mon Sep 17 00:00:00 2001 From: Enes Aldemir Date: Fri, 16 Aug 2019 14:04:14 +0300 Subject: [PATCH 08/12] Integrate sbt-gh-pages to create GH Pages from Scaladoc (closes #91) --- .travis.yml | 1 + .travis/deploy.sh | 1 + .travis/deploy_docs.sh | 17 
+++++++++++++++++ README.md | 4 +++- build.sbt | 4 ++++ project/BuildSettings.scala | 18 ++++++++++++++++++ project/plugins.sbt | 5 ++++- project/travis-deploy-key.enc | Bin 0 -> 3392 bytes src/site-preprocess/index.html | 20 ++++++++++++++++++++ 9 files changed, 68 insertions(+), 2 deletions(-) create mode 100755 .travis/deploy_docs.sh create mode 100644 project/travis-deploy-key.enc create mode 100644 src/site-preprocess/index.html diff --git a/.travis.yml b/.travis.yml index 0f0a3db..8b78630 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,3 +23,4 @@ env: # SONA_PASS - secure: oDu2dXzektYr/7K5nw7EL2qDUR5AhO4Uz6XGHoOQsN1gJiovdsa5nJeDHgo2YFGpJljyTo+lABbxpGIFQpcnKGIG9eAaXIyYpRlEiksTUnZdwIlCXkRMg2l9cUr30ZDOoVS8QpQbCDdogOSqJ+RUShSuiXR8Qi2e0RfrsVucgkNogQ6w1IoB9kV8CAYsnJVzi/oenTJZjEh5qrKiUALpkiHGjB9WSIHQ80sAO/rwnr88w++HcOIqgnvhJ3/Ig3N6201Slud5pF2yVz4MxzY8bedetqNil5ffosYiU7dladOiKTVj8efZPx0cGq0dhpAZFVhehlXyu4EA24NRgKYvAIc0xWVVm49IBaMpDDI/nh24uF9fBPt2+Apj5BY/ETpKS5tFqFaGkBjlL9KFL3l2DfnWC8AfTHlBXFlkH8tKPSN4so612QAmWuULtrVuQpV8DF40HNwJoR2Lyyy5aHrZtpdjHsp3OJI83QfCxH2yTYhes4eHAxi4ynZDSDolt6mrjx651mmlQCsJWJ5KdWHQwjqzgRP8q1/bCaDYdODhrz0K1JPl6YYA+dzwRP+rFeSQbzG0yGo12p7FZGpq36/Hq9C/HSw6WVDN3Lr8CUxZr1rDhtmAvaMJG5EyYDXpNGn9j2DJX76A1Ifu7KXCp8h+FTLPa1CIxJruNxEA6vFSdqA= - SONA_USER=snowplow + - ENCRYPTION_LABEL: diff --git a/.travis/deploy.sh b/.travis/deploy.sh index 05b06fe..884d020 100755 --- a/.travis/deploy.sh +++ b/.travis/deploy.sh @@ -18,6 +18,7 @@ pwd project_version=$(sbt version -Dsbt.log.noformat=true | perl -ne 'print $1 if /(\d+\.\d+\.\d+[^\r\n]*)/') if [ "${project_version}" == "${tag_version}" ]; then + ./.travis/deploy_docs.sh sbt +publish sbt +bintraySyncMavenCentral else diff --git a/.travis/deploy_docs.sh b/.travis/deploy_docs.sh new file mode 100755 index 0000000..164d44b --- /dev/null +++ b/.travis/deploy_docs.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +ENCRYPTED_KEY_VAR="encrypted_${ENCRYPTION_LABEL}_key" +ENCRYPTED_IV_VAR="encrypted_${ENCRYPTION_LABEL}_iv" +ENCRYPTED_KEY=${!ENCRYPTED_KEY_VAR} +ENCRYPTED_IV=${!ENCRYPTED_IV_VAR} + +git config --global user.name "$USER" +git config --global user.email "$TRAVIS_BUILD_NUMBER@$TRAVIS_COMMIT" + +openssl aes-256-cbc -K $ENCRYPTED_KEY -iv $ENCRYPTED_IV -in project/travis-deploy-key.enc -out project/travis-deploy-key -d +chmod 600 project/travis-deploy-key + +eval "$(ssh-agent -s)" +ssh-add project/travis-deploy-key + +sbt ghpagesPushSite diff --git a/README.md b/README.md index 2ea2a1d..a9e03f0 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,8 @@ Use this SDK with **[Apache Spark][spark]**, **[AWS Lambda][lambda]**, **[Apache ## Documentation -[Setup guide][setup-guide] and [User guide][user-guide] are available at [Snowplow wiki][snowplow-wiki]. +[Setup guide][setup-guide] and [User guide][user-guide] are available on the [Snowplow wiki][snowplow-wiki]. +The Scaladoc website of the project can be found [here][scala-doc]. ## Copyright and license @@ -39,6 +40,7 @@ limitations under the License. 
[setup-guide]: https://github.com/snowplow/snowplow/wiki/Scala-Analytics-SDK-setup [user-guide]: https://github.com/snowplow/snowplow/wiki/Scala-Analytics-SDK [snowplow-wiki]: https://github.com/snowplow/snowplow/wiki +[scala-doc]: http://snowplow.github.io/snowplow-scala-analytics-sdk/ [snowplow]: http://snowplowanalytics.com [enriched-events]: https://github.com/snowplow/snowplow/wiki/canonical-event-model diff --git a/build.sbt b/build.sbt index f2eedc7..9cd7b11 100644 --- a/build.sbt +++ b/build.sbt @@ -20,10 +20,14 @@ lazy val root = project.in(file(".")) scalaVersion := "2.12.8", crossScalaVersions := Seq("2.11.12", "2.12.8") )) + .enablePlugins(SiteScaladocPlugin) + .enablePlugins(GhpagesPlugin) + .enablePlugins(PreprocessPlugin) .settings(BuildSettings.buildSettings) .settings(BuildSettings.publishSettings) .settings(BuildSettings.mimaSettings) .settings(BuildSettings.scoverageSettings) + .settings(BuildSettings.ghPagesSettings) .settings(Seq( shellPrompt := { _ => name.value + " > " } )) diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala index 1555638..4a0af3f 100644 --- a/project/BuildSettings.scala +++ b/project/BuildSettings.scala @@ -26,6 +26,12 @@ import com.typesafe.tools.mima.plugin.MimaPlugin // Scoverage plugin import scoverage.ScoverageKeys._ +import com.typesafe.sbt.sbtghpages.GhpagesPlugin.autoImport._ +import com.typesafe.sbt.site.SitePlugin.autoImport._ +import com.typesafe.sbt.site.SiteScaladocPlugin.autoImport._ +import com.typesafe.sbt.SbtGit.GitKeys.{gitBranch, gitRemoteRepo} +import com.typesafe.sbt.site.preprocess.PreprocessPlugin.autoImport._ + object BuildSettings { // Basic settings for our app @@ -90,4 +96,16 @@ object BuildSettings { (coverageReport dependsOn (test in Test)).value } ) + + val ghPagesSettings = Seq( + ghpagesPushSite := (ghpagesPushSite dependsOn makeSite).value, + ghpagesNoJekyll := false, + gitRemoteRepo := "git@github.com:snowplow/snowplow-scala-analytics-sdk.git", + gitBranch := Some("gh-pages"), + siteSubdirName in SiteScaladoc := s"${version.value}", + preprocessVars in Preprocess := Map("VERSION" -> version.value), + excludeFilter in ghpagesCleanSite := new FileFilter { + def accept(f: File) = true + } + ) } diff --git a/project/plugins.sbt b/project/plugins.sbt index 5888fe2..3db8a8c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,3 +1,6 @@ addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.3") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.5.0") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0") \ No newline at end of file +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0") +addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.4.0") +addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") +addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") diff --git a/project/travis-deploy-key.enc b/project/travis-deploy-key.enc new file mode 100644 index 0000000000000000000000000000000000000000..eed0d2caccf4bf61c3334644fd386a6d6d7be63d GIT binary patch literal 3392 zcmV-G4Zrd^K71EP&x&X82{Y_u!&G#1I`iMLx>|DqKu}2 z%n0xZl!KI4^+r%&pUI5&CQ3jSyE3(jfAwZ#a zuEe69dWg1ERljV4olFZZyyxe(9{8w*&Wa%67h*B|OAj^$!H1VXY|5{Dn0ovo(@oPx z|3Bg|I>4~~{f10cM4*8j6fC8wRWAADwVDMJ83GKHJ)^%NCt<}n6uxjt>FE>mr~4`d z0s-QneRfS|Za$DU=WW~S0n7~s78Ib#vKrf*Y>s(j^ps#?^8ax_n-?R#+ijDm^$MZL0D~XC;5SHc=Su3M-)o2`_s83u@AW#=tR>0F5V;RImF z>0nMB^?{1{1oxitG;h-pI!q++0~;~%#y2exd^Yhmq&xdP$5I<082Q2GP1|q-;ufMQ zKT|4aD9Q2cp}?G|MhJI%U|Vgj0;~SnH3H5LrDlN!vLWR>L8|~_r2-#Yv50d9%1G;D 
From 583b2edfcfdd7ba0e32280056c0c9aea971a17f6 Mon Sep 17 00:00:00 2001
From: Enes Aldemir
Date: Fri, 13 Sep 2019 14:36:44 +0300
Subject: [PATCH 09/12] Fix empty contexts and unstruct_event decoding (closes #92)

---
 .../SnowplowEvent.scala |  28 ++--
 .../EventSpec.scala     | 149 +++++++++++++++++-
 2 files changed, 161 insertions(+), 16 deletions(-)

diff --git a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala
index 5184dbd..599c5a1 100644
--- a/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala
+++ b/src/main/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/SnowplowEvent.scala
@@ -41,6 +41,19 @@ object SnowplowEvent {
     }
   }
 
+  implicit final val unstructCirceEncoder: Encoder[UnstructEvent] =
+    Encoder.instance { unstructEvent: UnstructEvent =>
+      if (unstructEvent.data.isEmpty) Json.Null
+      else JsonObject(
+        ("schema", Common.UnstructEventUri.toSchemaUri.asJson),
+        ("data", unstructEvent.data.asJson)
+      ).asJson
+    }
+
+  implicit val unstructEventDecoder: Decoder[UnstructEvent] = deriveDecoder[UnstructEvent].recover {
+    case DecodingFailure(_, DownField("data") :: _) => UnstructEvent(None)
+  }
+
   /**
    * A JSON representation of an atomic event's contexts or derived_contexts fields.
    *
@@ -67,20 +80,7 @@ object SnowplowEvent {
   }
 
   implicit val contextsDecoder: Decoder[Contexts] = deriveDecoder[Contexts].recover {
-    case DecodingFailure(_, List(DownField("data"), DownField(_))) => Contexts(List())
-  }
-
-  implicit final val unstructCirceEncoder: Encoder[UnstructEvent] =
-    Encoder.instance { unstructEvent: UnstructEvent =>
-      if (unstructEvent.data.isEmpty) Json.Null
-      else JsonObject(
-        ("schema", Common.UnstructEventUri.toSchemaUri.asJson),
-        ("data", unstructEvent.data.asJson)
-      ).asJson
-    }
-
-  implicit val unstructEventDecoder: Decoder[UnstructEvent] = deriveDecoder[UnstructEvent].recover {
-    case DecodingFailure(_, List(DownField("data"), DownField(_))) => UnstructEvent(None)
+    case DecodingFailure(_, DownField("data") :: _) => Contexts(List())
   }
 
   /**
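The substantive change is the pattern in the .recover handlers: matching a DownField("data") :: _ prefix instead of the exact two-element cursor history means recovery now fires both when a Contexts or UnstructEvent is decoded at the top level and when it is nested inside an Event decode. A minimal sketch of the intended behaviour (illustrative only, assuming the implicit decoders above are in scope):

    import io.circe.parser.decode
    import com.snowplowanalytics.snowplow.analytics.scalasdk.SnowplowEvent.{Contexts, UnstructEvent}

    // Empty values are encoded as JSON null by the encoders above, so a bare
    // null should now recover to the empty representations instead of failing:
    decode[Contexts]("null")      // expected: Right(Contexts(List()))
    decode[UnstructEvent]("null") // expected: Right(UnstructEvent(None))
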
diff --git a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala
index 8d95f1c..f44bd1f 100644
--- a/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala
+++ b/src/test/scala/com.snowplowanalytics.snowplow.analytics.scalasdk/EventSpec.scala
@@ -23,9 +23,10 @@ import cats.data.NonEmptyList
 import cats.syntax.either._
 
 // circe
-import io.circe.{Json, JsonObject}
+import io.circe.{Json, JsonObject, Encoder, Decoder}
 import io.circe.syntax._
 import io.circe.parser._
+import io.circe.generic.semiauto._
 
 // Specs2
 import org.specs2.mutable.Specification
@@ -2306,7 +2307,8 @@ class EventSpec extends Specification {
       event_fingerprint = Some("e3dbfa9cca0412c3d4052863cefb547f"),
       true_tstamp = Some(Instant.parse("2013-11-26T00:03:57.886Z"))
     )
-    val eventJson = event.toJson(false)
+    val eventJsonStr = event.toJson(false).noSpaces
+    val eventJson = parse(eventJsonStr).getOrElse(throw new RuntimeException("Error while converting to json"))
 
     eventJson.as[Event] must beRight(event)
   }
@@ -2904,6 +2906,149 @@ class EventSpec extends Specification {
      val eventJson = event.toJson(false)
      eventJson.as[Event] must beRight(event)
    }
+
+    "successfully decode object with event which has no contexts or unstruct_event" in {
+      case class Temp(event: Event)
+      implicit val tempClassJsonEncoder: Encoder[Temp] = deriveEncoder
+      implicit val tempClassJsonDecoder: Decoder[Temp] = deriveDecoder
+      val event = Event(
+        app_id = Some("angry-birds"),
+        platform = Some("web"),
+        etl_tstamp = Some(Instant.parse("2017-01-26T00:01:25.292Z")),
+        collector_tstamp = Instant.parse("2013-11-26T00:02:05Z"),
+        dvce_created_tstamp = Some(Instant.parse("2013-11-26T00:03:57.885Z")),
+        event = Some("page_view"),
+        event_id = UUID.fromString("c6ef3124-b53a-4b13-a233-0088f79dcbcb"),
+        txn_id = Some(41828),
+        name_tracker = Some("cloudfront-1"),
+        v_tracker = Some("js-2.1.0"),
+        v_collector = "clj-tomcat-0.1.0",
+        v_etl = "serde-0.5.2",
+        user_id = Some("jon.doe@email.com"),
+        user_ipaddress = Some("92.231.54.234"),
+        user_fingerprint = Some("2161814971"),
+        domain_userid = Some("bc2e92ec6c204a14"),
+        domain_sessionidx = Some(3),
+        network_userid = Some("ecdff4d0-9175-40ac-a8bb-325c49733607"),
+        geo_country = Some("US"),
+        geo_region = Some("TX"),
+        geo_city = Some("New York"),
+        geo_zipcode = Some("94109"),
+        geo_latitude = Some(37.443604),
+        geo_longitude = Some(-122.4124),
+        geo_region_name = Some("Florida"),
+        ip_isp = Some("FDN Communications"),
+        ip_organization = Some("Bouygues Telecom"),
+        ip_domain = Some("nuvox.net"),
+        ip_netspeed = Some("Cable/DSL"),
+        page_url = Some("http://www.snowplowanalytics.com"),
+        page_title = Some("On Analytics"),
+        page_referrer = None,
+        page_urlscheme = Some("http"),
+        page_urlhost = Some("www.snowplowanalytics.com"),
+        page_urlport = Some(80),
+        page_urlpath = Some("/product/index.html"),
+        page_urlquery = Some("id=GTM-DLRG"),
+        page_urlfragment = Some("4-conclusion"),
+        refr_urlscheme = None,
+        refr_urlhost = None,
+        refr_urlport = None,
+        refr_urlpath = None,
+        refr_urlquery = None,
+        refr_urlfragment = None,
+        refr_medium = None,
+        refr_source = None,
+        refr_term = None,
+        mkt_medium = None,
+        mkt_source = None,
+        mkt_term = None,
+        mkt_content = None,
+        mkt_campaign = None,
+        contexts = Contexts(List()),
+        se_category = None,
+        se_action = None,
+        se_label = None,
+        se_property = None,
+        se_value = None,
+        unstruct_event = UnstructEvent(None),
+        tr_orderid = None,
+        tr_affiliation = None,
+        tr_total = None,
+        tr_tax = None,
+        tr_shipping = None,
+        tr_city = None,
+        tr_state = None,
+        tr_country = None,
+        ti_orderid = None,
+        ti_sku = None,
+        ti_name = None,
+        ti_category = None,
+        ti_price = None,
+        ti_quantity = None,
+        pp_xoffset_min = None,
+        pp_xoffset_max = None,
+        pp_yoffset_min = None,
+        pp_yoffset_max = None,
+        useragent = None,
+        br_name = None,
+        br_family = None,
+        br_version = None,
+        br_type = None,
+        br_renderengine = None,
+        br_lang = None,
+        br_features_pdf = Some(true),
+        br_features_flash = Some(false),
+        br_features_java = None,
+        br_features_director = None,
+        br_features_quicktime = None,
+        br_features_realplayer = None,
+        br_features_windowsmedia = None,
+        br_features_gears = None,
+        br_features_silverlight = None,
+        br_cookies = None,
+        br_colordepth = None,
+        br_viewwidth = None,
+        br_viewheight = None,
+        os_name = None,
+        os_family = None,
+        os_manufacturer = None,
+        os_timezone = None,
+        dvce_type = None,
+        dvce_ismobile = None,
+        dvce_screenwidth = None,
+        dvce_screenheight = None,
+        doc_charset = None,
+        doc_width = None,
+        doc_height = None,
+        tr_currency = None,
+        tr_total_base = None,
+        tr_tax_base = None,
+        tr_shipping_base = None,
+        ti_currency = None,
+        ti_price_base = None,
+        base_currency = None,
+        geo_timezone = None,
+        mkt_clickid = None,
+        mkt_network = None,
+        etl_tags = None,
+        dvce_sent_tstamp = None,
+        refr_domain_userid = None,
+        refr_dvce_tstamp = None,
+        derived_contexts = Contexts(List()),
+        domain_sessionid = Some("2b15e5c8-d3b1-11e4-b9d6-1681e6b88ec1"),
+        derived_tstamp = Some(Instant.parse("2013-11-26T00:03:57.886Z")),
+        event_vendor = Some("com.snowplowanalytics.snowplow"),
+        event_name = Some("link_click"),
+        event_format = Some("jsonschema"),
+        event_version = Some("1-0-0"),
+        event_fingerprint = Some("e3dbfa9cca0412c3d4052863cefb547f"),
+        true_tstamp = Some(Instant.parse("2013-11-26T00:03:57.886Z"))
+      )
+      val tempInstance = Temp(event)
+      val tempJsonStr = tempInstance.asJson.noSpaces
+      val tempJson = parse(tempJsonStr).getOrElse(throw new RuntimeException("Error while converting to json"))
+      tempJson.as[Temp].map(_.event) must beRight(event)
+    }
   }
 
   "The transformSchema method" should {
From 34856cce2ead58e886d60e5b7423f150f49316be Mon Sep 17 00:00:00 2001
From: Dilyan Damyanov
Date: Tue, 5 Nov 2019 15:13:13 +0000
Subject: [PATCH 10/12] Add Travis CI secret key (closes #93)

---
 project/travis-deploy-key.enc | Bin 3392 -> 3392 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/project/travis-deploy-key.enc b/project/travis-deploy-key.enc
index eed0d2caccf4bf61c3334644fd386a6d6d7be63d..b31e9db6442064ae39eeabd8a0bbf291ad6a63cb 100644
GIT binary patch
literal 3392
[3392 bytes of base85-encoded binary data: the replacement encrypted SSH deploy key]

literal 3392
[3392 bytes of base85-encoded binary data: the previous encrypted SSH deploy key]

Date: Tue, 5 Nov 2019 15:18:07 +0000
Subject: [PATCH 11/12] Add encryption label to .travis.yml (closes #94)

---
 .travis.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index 8b78630..0d62f69 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -23,4 +23,4 @@ env:
   # SONA_PASS
   - secure: oDu2dXzektYr/7K5nw7EL2qDUR5AhO4Uz6XGHoOQsN1gJiovdsa5nJeDHgo2YFGpJljyTo+lABbxpGIFQpcnKGIG9eAaXIyYpRlEiksTUnZdwIlCXkRMg2l9cUr30ZDOoVS8QpQbCDdogOSqJ+RUShSuiXR8Qi2e0RfrsVucgkNogQ6w1IoB9kV8CAYsnJVzi/oenTJZjEh5qrKiUALpkiHGjB9WSIHQ80sAO/rwnr88w++HcOIqgnvhJ3/Ig3N6201Slud5pF2yVz4MxzY8bedetqNil5ffosYiU7dladOiKTVj8efZPx0cGq0dhpAZFVhehlXyu4EA24NRgKYvAIc0xWVVm49IBaMpDDI/nh24uF9fBPt2+Apj5BY/ETpKS5tFqFaGkBjlL9KFL3l2DfnWC8AfTHlBXFlkH8tKPSN4so612QAmWuULtrVuQpV8DF40HNwJoR2Lyyy5aHrZtpdjHsp3OJI83QfCxH2yTYhes4eHAxi4ynZDSDolt6mrjx651mmlQCsJWJ5KdWHQwjqzgRP8q1/bCaDYdODhrz0K1JPl6YYA+dzwRP+rFeSQbzG0yGo12p7FZGpq36/Hq9C/HSw6WVDN3Lr8CUxZr1rDhtmAvaMJG5EyYDXpNGn9j2DJX76A1Ifu7KXCp8h+FTLPa1CIxJruNxEA6vFSdqA=
   - SONA_USER=snowplow
-  - ENCRYPTION_LABEL:
+  - ENCRYPTION_LABEL: 15f61f58913b
From 4a1d3b3978d40541f50a91359b7dd8ef70002b63 Mon Sep 17 00:00:00 2001
From: Enes Aldemir
Date: Tue, 13 Aug 2019 13:28:01 +0300
Subject: [PATCH 12/12] Prepare for release

---
 CHANGELOG | 14 ++++++++++++++
 README.md |  2 +-
 build.sbt |  2 +-
 3 files changed, 16 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG b/CHANGELOG
index 685b113..27e8f27 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,17 @@
+Version 1.0.0 (2019-11-06)
+--------------------------
+Make parsing errors type-safe (#75)
+Add function to create minimal event (#81)
+Deprecate run manifest (#86)
+Fix empty contexts and unstruct_event decoding bug (#92)
+Integrate MiMa (#87)
+Integrate scoverage (#90)
+Integrate sbt-gh-pages to create GH Pages from Scaladoc (#91)
+Remove Vagrant setup (#84)
+Add Travis CI secret key (#93)
+Add encryption label to .travis.yml (#94)
+Extend copyright notice to 2019 (#85)
+
 Version 0.4.2 (2019-08-06)
 --------------------------
 Bump iglu-core to 0.5.1 (#73)

diff --git a/README.md b/README.md
index a9e03f0..cbce12f 100644
--- a/README.md
+++ b/README.md
@@ -34,7 +34,7 @@ limitations under the License.
 [license-image]: http://img.shields.io/badge/license-Apache--2-blue.svg?style=flat
 [license]: http://www.apache.org/licenses/LICENSE-2.0
 
-[release-image]: http://img.shields.io/badge/release-0.4.2-blue.svg?style=flat
+[release-image]: http://img.shields.io/badge/release-1.0.0-blue.svg?style=flat
 [releases]: https://github.com/snowplow/snowplow-scala-analytics-sdk/releases
 
 [setup-guide]: https://github.com/snowplow/snowplow/wiki/Scala-Analytics-SDK-setup

diff --git a/build.sbt b/build.sbt
index 9cd7b11..0c07093 100644
--- a/build.sbt
+++ b/build.sbt
@@ -15,7 +15,7 @@ lazy val root = project.in(file("."))
   .settings(Seq[Setting[_]](
     name := "snowplow-scala-analytics-sdk",
     organization := "com.snowplowanalytics",
-    version := "0.4.2",
+    version := "1.0.0",
     description := "Scala analytics SDK for Snowplow",
     scalaVersion := "2.12.8",
     crossScalaVersions := Seq("2.11.12", "2.12.8")