Skip to content

Commit

Permalink
Make errors type-safe
Browse files Browse the repository at this point in the history
  • Loading branch information
aldemirenes committed Jul 5, 2019
1 parent e02f11f commit 89af657
Show file tree
Hide file tree
Showing 7 changed files with 111 additions and 32 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
package com.snowplowanalytics.snowplow.analytics.scalasdk

/**
 * Represents an error that can occur while parsing a TSV event line.
 *
 * Extends `Product with Serializable` so that inferred least-upper-bound
 * types of mixed error values stay `ParsingError` rather than widening to
 * `Product with Serializable with ParsingError`.
 */
sealed trait ParsingError extends Product with Serializable

object ParsingError {

  /**
   * Indicates that the given line is not TSV-formatted at all.
   */
  case object NonTSVPayload extends ParsingError

  /**
   * Indicates that the number of columns in the given line does not
   * match the number of expected columns.
   * @param expectedNum expected number of columns
   * @param gotNum actual number of columns found in the line
   */
  final case class ColumnNumberMismatch(expectedNum: Int, gotNum: Int) extends ParsingError

  /**
   * Indicates that the value of a field is not valid,
   * e.g. an invalid timestamp or an invalid UUID.
   * @param key key (column name) of the field
   * @param value offending value of the field
   */
  final case class InvalidValue(key: String, value: String) extends ParsingError

  /**
   * Represents any unexpected error.
   * @param error error message
   */
  final case class UnexpectedError(error: String) extends ParsingError
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ package com.snowplowanalytics.snowplow.analytics.scalasdk.decode
import shapeless._
import shapeless.ops.record._
import shapeless.ops.hlist._

import Parser._
import cats.data.{NonEmptyList, Validated}
import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.{ColumnNumberMismatch, NonTSVPayload}

private[scalasdk] trait Parser[A] extends Serializable {
/** Heterogeneous TSV values */
Expand All @@ -33,9 +33,16 @@ private[scalasdk] trait Parser[A] extends Serializable {

def parse(row: String): DecodeResult[A] = {
val values = row.split("\t", -1)
val zipped = knownKeys.zipAll(values, UnknownKeyPlaceholder, ValueIsMissingPlaceholder)
val decoded = decoder(zipped)
decoded.map { decodedValue => generic.from(decodedValue) }
if (values.length == 1) {
Validated.Invalid(NonEmptyList.of(NonTSVPayload))
}
else if (values.length != knownKeys.length) {
Validated.Invalid(NonEmptyList.of(ColumnNumberMismatch(knownKeys.length, values.length)))
} else {
val zipped = knownKeys.zip(values)
val decoded = decoder(zipped)
decoded.map { decodedValue => generic.from(decodedValue) }
}
}
}

Expand All @@ -61,11 +68,6 @@ object Parser {
}
}

/** Key name that will be used if TSV has more columns than a class */
val UnknownKeyPlaceholder = 'UnknownKey
/** Value that will be used if class has more fields than a TSV */
val ValueIsMissingPlaceholder = "VALUE IS MISSING"

/** Derive a TSV parser for `A` */
private[scalasdk] def deriveFor[A]: DeriveParser[A] =
new DeriveParser[A] {}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import shapeless._
import cats.syntax.validated._
import cats.syntax.either._
import cats.syntax.apply._
import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.UnexpectedError

/**
* Type class to decode List of keys-value pairs into HList
Expand All @@ -41,15 +42,15 @@ private[scalasdk] object RowDecoder {
row match {
case h :: t =>
val hv: DecodeResult[H] =
ValueDecoder[H].parse(h).leftMap(_._2).toValidatedNel
ValueDecoder[H].parse(h).toValidatedNel
val tv = RowDecoder[T].apply(t)
(hv, tv).mapN { _ :: _ }
case Nil => "Not enough values, format is invalid".invalidNel
case Nil => UnexpectedError("Not enough values, format is invalid").invalidNel
}

implicit val hnilFromRow: RowDecoder[HNil] = fromFunc {
case Nil => HNil.validNel
case rows => s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}".invalidNel
case rows => UnexpectedError(s"No more values expected, following provided: ${rows.map(_._2).mkString(", ")}").invalidNel
}

implicit def hconsFromRow[H: ValueDecoder, T <: HList: RowDecoder]: RowDecoder[H :: T] =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ import io.circe.{Error, Json}
// This library
import com.snowplowanalytics.snowplow.analytics.scalasdk.Common.{ContextsCriterion, UnstructEventCriterion}
import com.snowplowanalytics.snowplow.analytics.scalasdk.SnowplowEvent.{Contexts, UnstructEvent}
import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.InvalidValue

private[decode] trait ValueDecoder[A] {
def parse(column: (Key, String)): DecodedValue[A]
Expand All @@ -50,7 +51,7 @@ private[decode] object ValueDecoder {
implicit final val stringColumnDecoder: ValueDecoder[String] =
fromFunc[String] {
case (key, value) =>
if (value.isEmpty) (key, s"Field $key cannot be empty").asLeft else value.asRight
if (value.isEmpty) InvalidValue(key.toString, s"Field $key cannot be empty").asLeft else value.asRight
}

implicit final val stringOptionColumnDecoder: ValueDecoder[Option[String]] =
Expand All @@ -67,21 +68,21 @@ private[decode] object ValueDecoder {
value.toInt.some.asRight
} catch {
case _: NumberFormatException =>
(key, s"Cannot parse key $key with value $value into integer").asLeft
InvalidValue(key.toString, s"Cannot parse key $key with value $value into integer").asLeft
}
}

implicit final val uuidColumnDecoder: ValueDecoder[UUID] =
fromFunc[UUID] {
case (key, value) =>
if (value.isEmpty)
(key, s"Field $key cannot be empty").asLeft
InvalidValue(key.toString, s"Field $key cannot be empty").asLeft
else
try {
UUID.fromString(value).asRight[(Key, String)]
UUID.fromString(value).asRight[InvalidValue]
} catch {
case _: IllegalArgumentException =>
(key, s"Cannot parse key $key with value $value into UUID").asLeft
InvalidValue(key.toString, s"Cannot parse key $key with value $value into UUID").asLeft
}
}

Expand All @@ -92,7 +93,7 @@ private[decode] object ValueDecoder {
case "0" => false.some.asRight
case "1" => true.some.asRight
case "" => none[Boolean].asRight
case _ => (key, s"Cannot parse key $key with value $value into boolean").asLeft
case _ => InvalidValue(key.toString, s"Cannot parse key $key with value $value into boolean").asLeft
}
}

Expand All @@ -105,22 +106,22 @@ private[decode] object ValueDecoder {
value.toDouble.some.asRight
} catch {
case _: NumberFormatException =>
(key, s"Cannot parse key $key with value $value into double").asLeft
InvalidValue(key.toString, s"Cannot parse key $key with value $value into double").asLeft
}
}

implicit final val instantColumnDecoder: ValueDecoder[Instant] =
fromFunc[Instant] {
case (key, value) =>
if (value.isEmpty)
(key, s"Field $key cannot be empty").asLeft
InvalidValue(key.toString, s"Field $key cannot be empty").asLeft
else {
val tstamp = reformatTstamp(value)
try {
Instant.parse(tstamp).asRight
} catch {
case _: DateTimeParseException =>
(key, s"Cannot parse key $key with value $value into datetime").asLeft
InvalidValue(key.toString, s"Cannot parse key $key with value $value into datetime").asLeft
}
}
}
Expand All @@ -129,50 +130,50 @@ private[decode] object ValueDecoder {
fromFunc[Option[Instant]] {
case (key, value) =>
if (value.isEmpty)
none[Instant].asRight[(Key, String)]
none[Instant].asRight[InvalidValue]
else {
val tstamp = reformatTstamp(value)
try {
Instant.parse(tstamp).some.asRight
} catch {
case _: DateTimeParseException =>
(key, s"Cannot parse key $key with value $value into datetime").asLeft
InvalidValue(key.toString, s"Cannot parse key $key with value $value into datetime").asLeft
}
}
}

implicit final val unstructuredJson: ValueDecoder[UnstructEvent] =
fromFunc[UnstructEvent] {
case (key, value) =>
def asLeft(error: Error): (Key, String) = (key, error.show)
def asLeft(error: Error): InvalidValue = InvalidValue(key.toString(), error.show)
if (value.isEmpty)
UnstructEvent(None).asRight[(Key, String)]
UnstructEvent(None).asRight[InvalidValue]
else
parseJson(value)
.flatMap(_.as[SelfDescribingData[Json]])
.leftMap(asLeft) match {
case Right(SelfDescribingData(schema, data)) if UnstructEventCriterion.matches(schema) =>
data.as[SelfDescribingData[Json]].leftMap(asLeft).map(_.some).map(UnstructEvent.apply)
case Right(SelfDescribingData(schema, _)) =>
(key, s"Unknown payload: ${schema.toSchemaUri}").asLeft[UnstructEvent]
InvalidValue(key.toString, s"Unknown payload: ${schema.toSchemaUri}").asLeft[UnstructEvent]
case Left(error) => error.asLeft[UnstructEvent]
}
}

implicit final val contexts: ValueDecoder[Contexts] =
fromFunc[Contexts] {
case (key, value) =>
def asLeft(error: Error): (Key, String) = (key, error.show)
def asLeft(error: Error): InvalidValue = InvalidValue(key.toString, error.show)
if (value.isEmpty)
Contexts(List()).asRight[(Key, String)]
Contexts(List()).asRight[InvalidValue]
else
parseJson(value)
.flatMap(_.as[SelfDescribingData[Json]])
.leftMap(asLeft) match {
case Right(SelfDescribingData(schema, data)) if ContextsCriterion.matches(schema) =>
data.as[List[SelfDescribingData[Json]]].leftMap(asLeft).map(Contexts.apply)
case Right(SelfDescribingData(schema, _)) =>
(key, s"Unknown payload: ${schema.toSchemaUri}").asLeft[Contexts]
InvalidValue(key.toString, s"Unknown payload: ${schema.toSchemaUri}").asLeft[Contexts]
case Left(error) => error.asLeft[Contexts]
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,15 @@
package com.snowplowanalytics.snowplow.analytics.scalasdk

import cats.data.ValidatedNel
import com.snowplowanalytics.snowplow.analytics.scalasdk.ParsingError.InvalidValue

package object decode {
/** Expected name of the field */
type Key = Symbol

/** Result of single-value parsing */
type DecodedValue[A] = Either[(Key, String), A]
type DecodedValue[A] = Either[InvalidValue, A]

/** Result of TSV line parsing, which is either an event or non empty list of parse errors */
type DecodeResult[A] = ValidatedNel[String, A]
type DecodeResult[A] = ValidatedNel[ParsingError, A]
}
Loading

0 comments on commit 89af657

Please sign in to comment.