diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 16f57eef..bba8aab6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,9 +9,9 @@ name: Continuous Integration on: pull_request: - branches: ['*'] + branches: ['**'] push: - branches: ['*'] + branches: ['**'] tags: [v*] env: @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.6, 2.12.15] + scala: [2.13.7, 2.12.15] java: [adopt@1.8, adopt@1.11] runs-on: ${{ matrix.os }} steps: @@ -33,7 +33,7 @@ jobs: fetch-depth: 0 - name: Setup Java and Scala - uses: olafurpg/setup-scala@v10 + uses: olafurpg/setup-scala@v13 with: java-version: ${{ matrix.java }} @@ -52,11 +52,10 @@ jobs: - name: Check that workflows are up to date run: sbt ++${{ matrix.scala }} githubWorkflowCheck - - name: Build project - run: sbt ++${{ matrix.scala }} test + - run: sbt ++${{ matrix.scala }} test doc - name: Compress target directories - run: tar cf targets.tar core/js/target target main/target lambda-io-app/js/target aws-java-sdk2/target core/jvm/target test-kit/target lambda-io-app/jvm/target project/target + run: tar cf targets.tar target core/js/target core/jvm/target aws-java-sdk2/target project/target - name: Upload target directories uses: actions/upload-artifact@v2 @@ -71,7 +70,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.6] + scala: [2.13.7] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: @@ -81,7 +80,7 @@ jobs: fetch-depth: 0 - name: Setup Java and Scala - uses: olafurpg/setup-scala@v10 + uses: olafurpg/setup-scala@v13 with: java-version: ${{ matrix.java }} @@ -97,12 +96,12 @@ jobs: ~/Library/Caches/Coursier/v1 key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} - - name: Download target directories (2.13.6) + - name: Download target directories (2.13.7) uses: actions/download-artifact@v2 with: - name: target-${{ matrix.os }}-2.13.6-${{ matrix.java }} + name: target-${{ matrix.os }}-2.13.7-${{ matrix.java }} - - name: Inflate target directories (2.13.6) + - name: Inflate target directories (2.13.7) run: | tar xf targets.tar rm targets.tar @@ -122,4 +121,4 @@ jobs: PGP_SECRET: ${{ secrets.PGP_SECRET }} SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} - run: sbt ++${{ matrix.scala }} ci-release \ No newline at end of file + run: sbt ++${{ matrix.scala }} ci-release diff --git a/.github/workflows/clean.yml b/.github/workflows/clean.yml index b535fcc1..547aaa43 100644 --- a/.github/workflows/clean.yml +++ b/.github/workflows/clean.yml @@ -56,4 +56,4 @@ jobs: printf "Deleting '%s' #%d, %'d bytes\n" $name ${ARTCOUNT[$name]} $size ghapi -X DELETE $REPO/actions/artifacts/$id done - done \ No newline at end of file + done diff --git a/.mergify.yml b/.mergify.yml index 0f9d529d..81390431 100644 --- a/.mergify.yml +++ b/.mergify.yml @@ -1,8 +1,8 @@ queue_rules: - name: default conditions: - - status-success=Build and Test (ubuntu-latest, 2.13.6, adopt@1.8) - - status-success=Build and Test (ubuntu-latest, 2.13.6, adopt@1.11) + - status-success=Build and Test (ubuntu-latest, 2.13.7, adopt@1.8) + - status-success=Build and Test (ubuntu-latest, 2.13.7, adopt@1.11) - status-success=Build and Test (ubuntu-latest, 2.12.15, adopt@1.8) - status-success=Build and Test (ubuntu-latest, 2.12.15, adopt@1.11) @@ -25,8 +25,8 @@ pull_request_rules: - name: merge scala-steward's PRs conditions: - author=scala-steward - - status-success=Build and Test (ubuntu-latest, 2.13.6, adopt@1.8) - - 
status-success=Build and Test (ubuntu-latest, 2.13.6, adopt@1.11) + - status-success=Build and Test (ubuntu-latest, 2.13.7, adopt@1.8) + - status-success=Build and Test (ubuntu-latest, 2.13.7, adopt@1.11) - status-success=Build and Test (ubuntu-latest, 2.12.15, adopt@1.8) - status-success=Build and Test (ubuntu-latest, 2.12.15, adopt@1.11) actions: diff --git a/README.md b/README.md index ea10d250..1f9e58e9 100644 --- a/README.md +++ b/README.md @@ -5,63 +5,3 @@ Utility classes for working with the [Java AWS SDKs](https://github.com/aws/aws-sdk-java) from Scala using [fs2](https://github.com/functional-streams-for-scala/fs2). Projects including this library will also need to explicitly include the AWS SDK libraries they will rely on, to avoid inadvertently importing more libraries than are required. - -This library is essentially Dwolla’s [scala-aws-utils](https://github.com/Dwolla/scala-aws-utils) ported to fs2. - -## Artifacts - -#### Library - -```scala -"com.dwolla" %% "fs2-aws" % "2.0.0" -``` - -#### Core - -Non-AWS-specific utilities are published separately for the JVM and Scala.js. - -##### JVM - -```scala -"com.dwolla" %% "fs2-utils" % "2.0.0" -``` - -##### JS - -```scala -"com.dwolla" %%% "fs2-utils" % "2.0.0" -``` - -## Examples - -All examples assume the following imports. - -```scala -import cats.effect._ -import com.amazonaws.services.cloudformation._ -import com.amazonaws.services.cloudformation.model._ -``` - -### Paginate over an AWS resource - -Given an AWS Async client and a base request builder, obtain an fs2 `Stream` of the resource. - -```scala -val client: AmazonCloudFormationAsync = ??? -val requestFactory = () ⇒ new DescribeStackEventsRequest() -val x: Stream[IO, StackEvent] = requestFactory.fetchAll[IO](client.describeStackEventsAsync)(_.getStackEvents.asScala) -``` - -Note that settings can be changed inside the `() => Request` function. The pagination logic takes the result of calling the function and sets the next page token on the request before handing it to the AWS Async client. - -### Retrieve an AWS resource - -Given an AWS Async client and a request, obtain a [cats-effect `Async`](https://typelevel.org/cats-effect/typeclasses/async.html) that will contain the resource upon completion. - -For paginated resources, this retrieves the first page. For non-paginated resources, this retrieves the entire resource. - -```scala -val client: AmazonCloudFormationAsync = ??? 
-val req = new DescribeStackEventsRequest() -val x: IO[DescribeStackEventsResult] = req.executeVia[IO](client.describeStackEventsAsync) -``` diff --git a/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/kms/KmsAlg.scala b/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/kms/KmsAlg.scala index 9e9eeda0..0c6b9784 100644 --- a/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/kms/KmsAlg.scala +++ b/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/kms/KmsAlg.scala @@ -24,7 +24,7 @@ object KmsAlg { private def releaseKmsClient[F[_] : Sync](client: KmsAsyncClient): F[Unit] = Sync[F].delay(client.close()) - def resource[F[_] : Concurrent]: Resource[F, KmsAlg[F]] = + def resource[F[_] : Async]: Resource[F, KmsAlg[F]] = for { client <- Resource.make(acquireKmsClient[F])(releaseKmsClient[F]) } yield new KmsAlg[F] { diff --git a/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/package.scala b/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/package.scala index 185390e2..5cc1dca9 100644 --- a/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/package.scala +++ b/aws-java-sdk2/src/main/scala/com/dwolla/fs2aws/package.scala @@ -31,8 +31,8 @@ class PartiallyAppliedFromPublisherFRes[F[_], Res](publisher: Publisher[Res]) { res andThen (_.asScala) andThen Chunk.iterable andThen Stream.chunk def apply[O](extractor: Res => java.lang.Iterable[O]) - (implicit ev: ConcurrentEffect[F]): Stream[F, O] = - fromPublisher[F, Res](publisher) + (implicit ev: Async[F]): Stream[F, O] = + fromPublisher[F, Res](publisher, 1) .flatMap(toStream(extractor)) } @@ -41,14 +41,14 @@ class PartiallyAppliedEvalF[F[_]] { def apply[Req, Res, O](req: => Req) (client: Req => CompletableFuture[Res]) (extractor: Res => O) - (implicit ev: Concurrent[F]): F[O] = + (implicit ev: Async[F]): F[O] = cfToF[F](client(req)).map(extractor) } private[fs2aws] class PartialCompletableFutureToF[F[_]] { def apply[A](makeCf: => CompletableFuture[A]) - (implicit ev: Concurrent[F]): F[A] = - Concurrent.cancelableF[F, A] { cb => + (implicit ev: Async[F]): F[A] = { + Async[F].async { cb => val cf = makeCf cf.handle[Unit]((result, err) => err match { case null => @@ -61,7 +61,7 @@ private[fs2aws] class PartialCompletableFutureToF[F[_]] { cb(Left(ex)) }) - val cancelToken: CancelToken[F] = Sync[F].delay(cf.cancel(true)).void - cancelToken.pure[F] + Sync[F].delay(cf.cancel(true)).void.some.pure[F] } + } } diff --git a/build.sbt b/build.sbt index 422d3f25..6e13f0cf 100644 --- a/build.sbt +++ b/build.sbt @@ -1,6 +1,5 @@ lazy val primaryName = "fs2-aws" -lazy val specs2Version = "4.13.0" -lazy val fs2Version = "2.5.10" +lazy val fs2Version = "3.2.2" inThisBuild(List( organization := "com.dwolla", @@ -14,18 +13,12 @@ inThisBuild(List( url("https://dwolla.com") ) ), - crossScalaVersions := Seq("2.13.6", "2.12.15"), + crossScalaVersions := Seq("2.13.7", "2.12.15"), scalaVersion := crossScalaVersions.value.head, startYear := Option(2018), - libraryDependencies ++= { - Seq( - "co.fs2" %%% "fs2-core" % fs2Version, - "org.specs2" %%% "specs2-core" % specs2Version % Test, - "org.specs2" %%% "specs2-cats" % "4.12.1" % Test, - ) - }, resolvers += Resolver.sonatypeRepo("releases"), + githubWorkflowBuild := Seq(WorkflowStep.Sbt(List("test", "doc"))), githubWorkflowJavaVersions := Seq("adopt@1.8", "adopt@1.11"), githubWorkflowTargetTags ++= Seq("v*"), githubWorkflowPublishTargetBranches := @@ -68,32 +61,16 @@ lazy val fs2Utils = crossProject(JSPlatform, JVMPlatform) .settings( name := "fs2-utils", description := "Helpful utility functions for fs2 streams", + libraryDependencies ++= Seq( + 
"co.fs2" %%% "fs2-core" % fs2Version, + "org.scalameta" %%% "munit" % "0.7.29" % Test, + "com.eed3si9n.expecty" %%% "expecty" % "0.15.4" % Test, + "org.typelevel" %%% "munit-cats-effect-3" % "1.0.6" % Test, + ) ) lazy val fs2UtilsJVM = fs2Utils.jvm -lazy val fs2AwsUtils = (project in file("main")) - .settings(compilerOptions: _*) - .settings( - name := primaryName, - description := "Utility classes for interacting with the AWS SDKs from Scala using fs2", - libraryDependencies ++= { - val awsSdkVersion = "1.12.116" - - Seq( - "co.fs2" %% "fs2-io" % fs2Version, - "com.chuusai" %% "shapeless" % "2.3.7", - "org.scala-lang.modules" %% "scala-collection-compat" % "2.6.0", - "com.amazonaws" % "aws-java-sdk-core" % awsSdkVersion, - "com.amazonaws" % "aws-java-sdk-kms" % awsSdkVersion % Provided, - "com.amazonaws" % "aws-java-sdk-cloudformation" % awsSdkVersion % Provided, - "com.amazonaws" % "aws-java-sdk-s3" % awsSdkVersion % Provided, - "org.specs2" %% "specs2-mock" % specs2Version % Test, - ) - }, - ) - .dependsOn(fs2UtilsJVM) - lazy val fs2Aws2Utils = (project in file("aws-java-sdk2")) .settings(compilerOptions: _*) .settings( @@ -103,63 +80,14 @@ lazy val fs2Aws2Utils = (project in file("aws-java-sdk2")) Seq( "co.fs2" %% "fs2-reactive-streams" % fs2Version, "org.typelevel" %% "cats-tagless-macros" % "0.14.0", - "org.scala-lang.modules" %% "scala-collection-compat" % "2.1.1", + "org.scala-lang.modules" %% "scala-collection-compat" % "2.6.0", "software.amazon.awssdk" % "kms" % "2.17.86" % Provided, ) }, ) -lazy val lambdaIOApp = crossProject(JSPlatform, JVMPlatform) - .crossType(CrossType.Full) - .in(file("lambda-io-app")) - .settings(compilerOptions: _*) - .settings( - name := primaryName + "-lambda-io-app", - libraryDependencies ++= { - val circeVersion = "0.14.1" - Seq( - "io.circe" %%% "circe-literal" % circeVersion, - "io.circe" %%% "circe-generic-extras" % circeVersion, - "io.circe" %%% "circe-parser" % circeVersion, - "io.circe" %%% "circe-generic-extras" % circeVersion, - ) - }, - ) - .jvmSettings( - description := "IOApp for AWS Lambda Java runtime", - libraryDependencies ++= { - Seq( - "com.amazonaws" % "aws-lambda-java-core" % "1.2.1", - "com.amazonaws" % "aws-lambda-java-log4j2" % "1.2.0", - "co.fs2" %% "fs2-io" % fs2Version, - "org.typelevel" %% "log4cats-slf4j" % "1.3.1", - "org.apache.logging.log4j" % "log4j-slf4j-impl" % "2.14.1", - "org.apache.logging.log4j" % "log4j-api" % "2.14.1", - "org.typelevel" %% "cats-tagless-macros" % "0.14.0", - "org.tpolecat" %% "natchez-core" % "0.0.26", - "org.specs2" %% "specs2-scalacheck" % specs2Version, - ) - }, - ) - .jsSettings( - description := "IOApp for AWS Lambda Node runtime", - Compile / npmDependencies += ("@types/aws-lambda" -> "8.10.59"), - scalacOptions += "-Wconf:src=src_managed/.*:s", - stOutputPackage := "jsdep", - stMinimize := Selection.AllExcept("@types/aws-lambda"), - ) - .jsConfigure(_.enablePlugins(ScalablyTypedConverterGenSourcePlugin)) - -lazy val fs2TestKit: Project = (project in file("test-kit")) - .settings(compilerOptions: _*) - .settings( - name := primaryName + "-testkit", - description := "Test implementations of fs2-aws classes", - ) - .dependsOn(fs2AwsUtils) - lazy val `fs2-aws` = (project in file(".")) .settings( publish / skip := true, ) - .aggregate(fs2UtilsJVM, fs2Utils.js, fs2AwsUtils, fs2Aws2Utils, fs2TestKit, lambdaIOApp.jvm, lambdaIOApp.js) + .aggregate(fs2UtilsJVM, fs2Utils.js, fs2Aws2Utils) diff --git a/core/jvm/src/main/scala/com/dwolla/fs2utils/hashing/Sha256Pipe.scala 
b/core/jvm/src/main/scala/com/dwolla/fs2utils/hashing/Sha256Pipe.scala index eef4e1b7..1bf00c08 100644 --- a/core/jvm/src/main/scala/com/dwolla/fs2utils/hashing/Sha256Pipe.scala +++ b/core/jvm/src/main/scala/com/dwolla/fs2utils/hashing/Sha256Pipe.scala @@ -1,10 +1,10 @@ package com.dwolla.fs2utils.hashing -import java.security.MessageDigest - import cats.effect._ -import cats.effect.concurrent.Deferred import fs2._ +import scodec.bits.ByteVector + +import java.security.MessageDigest object Sha256Pipe { def apply[F[_] : Sync](promisedHexString: Deferred[F, Either[Throwable, String]]): Pipe[F, Byte, Byte] = { @@ -13,9 +13,9 @@ object Sha256Pipe { case None => Pull.eval(Sync[F].delay(digest.digest())).map(_.toHexString) case Some((c: Chunk[Byte], rest: Stream[F, Byte])) => - val bytes = c.toBytes + val bytes: ByteVector = c.toByteVector for { - _ <- Pull.eval(Sync[F].delay(digest.update(bytes.values, bytes.offset, bytes.length))) + _ <- Pull.eval(Sync[F].delay(digest.update(bytes.toByteBuffer))) _ <- Pull.output(c) hexString <- pull(digest)(rest) } yield hexString diff --git a/core/jvm/src/test/scala/com/dwolla/fs2utils/hashing/Sha256PipeSpec.scala b/core/jvm/src/test/scala/com/dwolla/fs2utils/hashing/Sha256PipeSpec.scala index 75debe36..f7c92134 100644 --- a/core/jvm/src/test/scala/com/dwolla/fs2utils/hashing/Sha256PipeSpec.scala +++ b/core/jvm/src/test/scala/com/dwolla/fs2utils/hashing/Sha256PipeSpec.scala @@ -1,58 +1,48 @@ package com.dwolla.fs2utils.hashing -import java.security.MessageDigest - import cats.effect._ -import cats.effect.concurrent.Deferred import cats.implicits._ +import com.eed3si9n.expecty.Expecty.expect import fs2._ -import org.specs2.execute.AsResult -import org.specs2.matcher._ -import org.specs2.mutable.Specification -import org.specs2.specification.core.{AsExecution, Execution} +import munit.CatsEffectSuite +import java.security.MessageDigest import scala.concurrent.duration._ -class Sha256PipeSpec extends Specification with IOMatchers { +class Sha256PipeSpec extends CatsEffectSuite { - "Sha256Pipe" should { - "pass the bytes through unchanged while returning the hash of the bytes" >> { + test("Sha256Pipe should pass the bytes through unchanged while returning the hash of the bytes") { val example = "Dwolla 🧟‍♀️" for { expectedDigest <- IO(MessageDigest.getInstance("SHA-256").digest(example.getBytes("UTF-8"))) deferredDigest <- Deferred[IO, Either[Throwable, String]] str <- Stream.emit(example) - .through(text.utf8Encode) + .through(text.utf8.encode) .through(Sha256Pipe(deferredDigest)) - .through(text.utf8Decode) + .through(text.utf8.decode) .compile .toList calculatedDigest <- deferredDigest.get } yield { - str must beEqualTo(List(example)) - calculatedDigest must beRight(expectedDigest.toHexString) + expect(str == List(example)) + expect(calculatedDigest == Right(expectedDigest.toHexString)) } } - "fail the deferred if the stream raises an exception" >> { + test("fail the deferred if the stream raises an exception") { for { deferred <- Deferred[IO, Either[Throwable, String]] exception = new RuntimeException("boom") {} failure <- (Stream.eval(IO("hello world")) ++ Stream.raiseError[IO](exception)) - .through(text.utf8Encode) + .through(text.utf8.encode) .through(Sha256Pipe(deferred)) .compile .drain .attempt output <- deferred.get.flatMap(_.liftTo[IO]).timeout(2.seconds).attempt } yield { - failure must beLeft(exception) - output must beLeft(InputStreamFailed(exception)) + expect(failure == Left(exception)) + expect(output == Left(InputStreamFailed(exception))) } - 
} - } - - private implicit def ioAsExecution[R: AsResult]: AsExecution[IO[R]] = new AsExecution[IO[R]] { - def execute(r: => IO[R]): Execution = Execution.withEnvAsync(env => (IO.shift(env.executionContext) >> r).unsafeToFuture()) } } diff --git a/core/shared/src/test/scala/com/dwolla/fs2utils/PaginationSpec.scala b/core/shared/src/test/scala/com/dwolla/fs2utils/PaginationSpec.scala index bf062929..c909d7bf 100644 --- a/core/shared/src/test/scala/com/dwolla/fs2utils/PaginationSpec.scala +++ b/core/shared/src/test/scala/com/dwolla/fs2utils/PaginationSpec.scala @@ -2,10 +2,10 @@ package com.dwolla.fs2utils import cats.effect.IO import fs2._ -import org.specs2.matcher.IOMatchers -import org.specs2.mutable.Specification +import com.eed3si9n.expecty.Expecty.expect +import munit.CatsEffectSuite -class PaginationSpec extends Specification with IOMatchers { +class PaginationSpec extends CatsEffectSuite { val unfoldFunction: Option[Int] => IO[(Chunk[Int], Option[Int])] = nextToken => { @@ -15,18 +15,15 @@ class PaginationSpec extends Specification with IOMatchers { IO.pure((Chunk.seq(ints), if (offset < 3) Option(offset + 1) else None)) } - "Pagination" should { - "unfold the given thing" >> { - val stream = Pagination.offsetUnfoldChunkEval(unfoldFunction).take(5) + test("Pagination should unfold the given thing") { + val stream = Pagination.offsetUnfoldChunkEval(unfoldFunction).take(5) - stream.compile.toList must returnValue(equalTo(0 until 5)) - } + stream.compile.toList.map(output => expect(output == (0 until 5))) + } - "stop unfolding when None is returned as the token value" >> { - val stream = Pagination.offsetUnfoldChunkEval(unfoldFunction) + test("Pagination should stop unfolding when None is returned as the token value") { + val stream = Pagination.offsetUnfoldChunkEval(unfoldFunction) - stream.compile.toList must returnValue(equalTo(0 until 12)) - } + stream.compile.toList.map(output => expect(output == (0 until 12))) } - } diff --git a/core/shared/src/test/scala/com/dwolla/fs2utils/hashing/HexStringsSpec.scala b/core/shared/src/test/scala/com/dwolla/fs2utils/hashing/HexStringsSpec.scala index 25139b36..1ac7f9a7 100644 --- a/core/shared/src/test/scala/com/dwolla/fs2utils/hashing/HexStringsSpec.scala +++ b/core/shared/src/test/scala/com/dwolla/fs2utils/hashing/HexStringsSpec.scala @@ -1,19 +1,27 @@ package com.dwolla.fs2utils.hashing import fs2._ -import org.specs2.mutable.Specification +import com.eed3si9n.expecty.Expecty.expect +import munit.FunSuite -class HexStringsSpec extends Specification { +class HexStringsSpec extends FunSuite { - "hexStringPipe" should { - "hex some things" >> { - Stream.emit(0x2).map(_.toByte).chunks.through(hexStringPipe).compile.toList must beEqualTo("02".toCharArray.toList) - } + test("hexStringPipe should hex some things") { + val output = Stream + .emit(0x2) + .map(_.toByte) + .chunks + .through(hexStringPipe) + .compile + .toList - "calculate the hex string from the bytes of emoji" >> { - val example = "👩‍💻" - new String(Stream.emit(example).through(text.utf8Encode).chunks.through(hexStringPipe).compile.toList.toArray) must beEqualTo(example.getBytes("UTF-8").toHexString) - } + expect(output == "02".toCharArray.toList) + } + + test("hexStringPipe should calculate the hex string from the bytes of emoji") { + val example = "👩‍💻" + val output = new String(Stream.emit(example).through(text.utf8.encode).chunks.through(hexStringPipe).compile.toList.toArray) + expect(output == example.getBytes("UTF-8").toHexString) } } diff --git 
a/lambda-io-app/js/src/main/scala/com/dwolla/lambda/IOLambda.scala b/lambda-io-app/js/src/main/scala/com/dwolla/lambda/IOLambda.scala deleted file mode 100644 index d0447b95..00000000 --- a/lambda-io-app/js/src/main/scala/com/dwolla/lambda/IOLambda.scala +++ /dev/null @@ -1,19 +0,0 @@ -package com.dwolla.lambda - -import cats.effect._ -import jsdep.awsLambda.handlerMod._ - -import scala.concurrent.ExecutionContext -import scala.scalajs.js -import scala.scalajs.js.JSConverters._ - -trait IOLambda[A <: js.Any, B] { - private implicit val executionContext: ExecutionContext = ExecutionContext.global - protected implicit def contextShift: ContextShift[IO] = cats.effect.IO.contextShift(executionContext) - protected implicit def timer: Timer[IO] = cats.effect.IO.timer(executionContext) - - def handleRequest(a: A, context: Context): IO[B] - - final val handler: Handler[A, B] = (a, context, _) => - handleRequest(a, context).unsafeToFuture().toJSPromise -} diff --git a/lambda-io-app/jvm/src/main/scala/com/dwolla/lambda/IOLambda.scala b/lambda-io-app/jvm/src/main/scala/com/dwolla/lambda/IOLambda.scala deleted file mode 100644 index 4f5750a2..00000000 --- a/lambda-io-app/jvm/src/main/scala/com/dwolla/lambda/IOLambda.scala +++ /dev/null @@ -1,154 +0,0 @@ -package com.dwolla.lambda - -import cats._ -import cats.data._ -import cats.effect._ -import cats.implicits._ -import cats.tagless._ -import cats.tagless.implicits._ -import com.amazonaws.services.lambda.runtime._ -import com.dwolla.lambda.IOLambda._ -import fs2.Stream -import fs2.io.writeOutputStream -import fs2.text.utf8Encode -import io.circe._ -import natchez._ -import org.typelevel.log4cats.Logger -import org.typelevel.log4cats.slf4j.Slf4jLogger - -import java.io._ -import java.net.URI -import scala.concurrent.ExecutionContext - -abstract class IOLambda[A, B](implicit - LR: LambdaReader[Kleisli[Kleisli[IO, Span[IO], *], LambdaReaderEnvironment[Kleisli[IO, Span[IO], *]], *], A]) extends ResourceIOLambda[BlockerK, A, B] { - def handleRequestF[F[_] : Concurrent : ContextShift : Logger : Timer : Trace](blocker: Blocker) - (req: A, context: Context): F[LambdaResponse[B]] - - override def resources[F[_] : ConcurrentEffect : ContextShift : Logger : Timer](blocker: Blocker) - (req: A, context: Context): Resource[F, BlockerK[F]] = - Resource.pure[F, BlockerK[F]](BlockerK[F](blocker)) - - override def handleRequestF[F[_] : Concurrent : ContextShift : Logger : Timer : Trace](resources: BlockerK[F]) - (req: A, context: Context): F[LambdaResponse[B]] = - handleRequestF[F](resources.blocker)(req, context) -} - -/** - * This class is analogous to the cats-effect `IOApp`, but for - * AWS Lambda functions instead of standalone applications. - * - * Implementers can decide whether they want to read from the Lambda's - * input or write to its output by picking concrete types for the `A` - * and `B` type variables, respectively. If I/O is desired, the type - * should have a Circe codec. If no I/O is desired, the type variable - * should be set to `Unit`. - * - * For example, to read to the JSON ADT and avoid any output written - * to the Lambda's OutputStream, one could use: - * - * {{{ - * class JsonReadingLambda extends IOLambda[Json, Unit] - * }}} - * - * Concrete classes must implement the `handleRequestF` method, which - * is written using an abstract effect type. Instances of the - * `Concurrent`, `ContextShift`, `Timer`, and `Trace` typeclasses are - * made available implicitly, along with an explicit blocking - * execution context. 
(`F` is generic so that the underlying - * implementation can treat it as `Kleisli[IO, Span[IO], *]`, which - * is also why `Concurrent` is the most powerful effect typeclass - * available—no `Effect` is available for `Kleisli`.) - * - * IOLambda should be considered experimental at this point. - */ -abstract class ResourceIOLambda[Resources[_[_]] : InvariantK, A, B](printer: Printer = Defaults.printer, - executionContext: ExecutionContext = Defaults.executionContext) - (implicit - LR: LambdaReader[Kleisli[Kleisli[IO, Span[IO], *], LambdaReaderEnvironment[Kleisli[IO, Span[IO], *]], *], A]) extends RequestStreamHandler { - protected implicit def contextShift: ContextShift[IO] = cats.effect.IO.contextShift(executionContext) - protected implicit def timer: Timer[IO] = cats.effect.IO.timer(executionContext) - protected implicit def logger: Logger[IO] = Slf4jLogger.getLoggerFromName[IO]("LambdaLogger") - private implicit val kleisliLogger: Logger[Kleisli[IO, Span[IO], *]] = Logger[IO].mapK(Kleisli.liftK) - - def resources[F[_] : ConcurrentEffect : ContextShift : Logger : Timer](blocker: Blocker) - (req: A, context: Context): Resource[F, Resources[F]] - - def handleRequestF[F[_] : Concurrent : ContextShift : Logger : Timer : Trace](resources: Resources[F]) - (req: A, context: Context): F[LambdaResponse[B]] - - val tracingEntryPoint: Resource[IO, EntryPoint[IO]] = NoOpEntryPoint[IO] - - private def printToStream(b: B)(implicit encoder: Encoder[B]): Stream[IO, Byte] = { - import io.circe.syntax._ - Stream.emit(printer.print(b.asJson)) - .through(utf8Encode[IO]) - } - - private def handleRequest(input: IO[InputStream], output: IO[OutputStream], context: Context): IO[Unit] = - tracingEntryPoint.use { - _.root("IOLambda").use { span => - Blocker[IO].use { blocker => - val response: IO[LambdaResponse[B]] = - LR - .read(Kleisli.liftF(Kleisli.liftF(input))) - .run(LambdaReaderEnvironment(blocker)) - .flatMap { a => - resources[IO](blocker)(a, context) - .mapK(Kleisli.liftK[IO, Span[IO]]) - .map(_.imapK(Kleisli.liftK[IO, Span[IO]])(λ[Kleisli[IO, Span[IO], *] ~> IO](_.run(span)))) - .use(handleRequestF[Kleisli[IO, Span[IO], *]](_)(a, context)) - } - .run(span) - - Stream.eval(response).flatMap { - case NoResponse => - Stream.empty - case ResponseWrapper(resp, encoder: Encoder[B]) => - Stream.emit(resp) - .flatMap(printToStream(_)(encoder)) - }.through(writeOutputStream(output, blocker)) - .compile - .drain - } - } - } - - final override def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = - handleRequest(Sync[IO].delay(input), Sync[IO].delay(output), context) - .unsafeRunSync() -} - -object IOLambda { - object Defaults { - val printer: Printer = Printer.noSpaces - val executionContext: ExecutionContext = ExecutionContext.global // TODO change this to the IOApp default - val logRequest: Boolean = true - } -} - -object NoOpEntryPoint { - private def noOpSpan[F[_] : Applicative]: Resource[F, Span[F]] = Resource.pure[F, Span[F]](new Span[F] { - override def put(fields: (String, TraceValue)*): F[Unit] = ().pure[F] - override def kernel: F[Kernel] = Kernel(Map.empty).pure[F] - override def span(name: String): Resource[F, Span[F]] = noOpSpan[F] - override def traceId: F[Option[String]] = none[String].pure[F] - override def spanId: F[Option[String]] = none[String].pure[F] - override def traceUri: F[Option[URI]] = none[URI].pure[F] - }) - - def apply[F[_] : Applicative]: Resource[F, EntryPoint[F]] = - Resource.pure[F, EntryPoint[F]](new EntryPoint[F] { - override def root(name: 
String): Resource[F, Span[F]] = noOpSpan[F] - override def continue(name: String, kernel: Kernel): Resource[F, Span[F]] = noOpSpan[F] - override def continueOrElseRoot(name: String, kernel: Kernel): Resource[F, Span[F]] = noOpSpan[F] - }) -} - -case class BlockerK[F[_]](blocker: Blocker) - -object BlockerK { - implicit val blockerHolderInvariantK: InvariantK[BlockerK] = new InvariantK[BlockerK] { - override def imapK[F[_], G[_]](af: BlockerK[F])(fk: F ~> G)(gK: G ~> F): BlockerK[G] = BlockerK[G](af.blocker) - } -} diff --git a/lambda-io-app/jvm/src/main/scala/com/dwolla/lambda/LambdaReaderEnvironment.scala b/lambda-io-app/jvm/src/main/scala/com/dwolla/lambda/LambdaReaderEnvironment.scala deleted file mode 100644 index 85abef33..00000000 --- a/lambda-io-app/jvm/src/main/scala/com/dwolla/lambda/LambdaReaderEnvironment.scala +++ /dev/null @@ -1,78 +0,0 @@ -package com.dwolla.lambda - -import java.io._ - -import _root_.fs2.io.readInputStream -import cats.Applicative -import cats.effect._ -import cats.implicits._ -import org.typelevel.log4cats.Logger -import natchez._ - -import scala.annotation.implicitNotFound - -class LambdaReaderEnvironment[F[_]](val blocker: Blocker, - val cs: ContextShift[F], - val l: Logger[F]) - -object LambdaReaderEnvironment { - def apply[F[_]](blocker: Blocker)(implicit CS: ContextShift[F], L: Logger[F]): LambdaReaderEnvironment[F] = - new LambdaReaderEnvironment(blocker, CS, L) - - def unapply[F[_]](env: LambdaReaderEnvironment[F]): Option[(Blocker, ContextShift[F], Logger[F])] = - Option((env.blocker, env.cs, env.l)) -} - -@implicitNotFound("Error resolving LambdaReader: Is there an io.circe.Decoder[${A}] in implicit scope?") -trait LambdaReader[F[_], A] { - def read(is: F[InputStream]): F[A] -} - -object LambdaReader extends LambdaReaderImplicits { - def apply[F[_], A](implicit ev: LambdaReader[F, A]): ev.type = ev -} - -trait LowPriorityLambdaReader { - import cats.data._ - import io.circe._ - import io.circe.parser._ - import fs2._ - import fs2.text._ - - implicit def readCirceDecodables[F[_] : Sync : Trace, A: Decoder]: LambdaReader[Kleisli[F, LambdaReaderEnvironment[F], *], A] = - new LambdaReader[Kleisli[F, LambdaReaderEnvironment[F], *], A] { - private val readFrom: Stream[F, Byte] => F[String] = s => Trace[F].span("readCirceDecodables.readFrom") { - s.through(utf8Decode[F]) - .compile - .string - } - - private def parseStream(input: Stream[F, Byte])(implicit L: Logger[F]): F[A] = Trace[F].span("readCirceDecodables.parseStream") { - for { - str <- readFrom(input) - json <- parseStringLoggingErrors(str) - req <- json.as[A].liftTo[F] - } yield req - } - - private def parseStringLoggingErrors(str: String)(implicit L: Logger[F]): F[Json] = Trace[F].span("readCirceDecodables.parseStringLoggingErrors") { - parse(str) - .toEitherT[F] - .leftSemiflatTap(Logger[F].error(_)(s"Could not parse the following input:\n$str")) - .rethrowT - } - - override def read(is: Kleisli[F, LambdaReaderEnvironment[F], InputStream]): Kleisli[F, LambdaReaderEnvironment[F], A] = - is.tapWith { (env: LambdaReaderEnvironment[F], i: InputStream) => (env.blocker, env.cs, env.l, i) } - .flatMapF { - case (b, cs, l, i) => - implicit val CS: ContextShift[F] = cs - implicit val L: Logger[F] = l - parseStream(readInputStream(i.pure[F], 4096, b)) - } - } -} - -trait LambdaReaderImplicits extends LowPriorityLambdaReader { - implicit def noInputLambdaReader[F[_] : Applicative]: LambdaReader[F, Unit] = _ => ().pure[F] -} diff --git 
a/lambda-io-app/jvm/src/test/java/com/dwolla/lambda/ConstructorTest.java b/lambda-io-app/jvm/src/test/java/com/dwolla/lambda/ConstructorTest.java deleted file mode 100644 index 47faa370..00000000 --- a/lambda-io-app/jvm/src/test/java/com/dwolla/lambda/ConstructorTest.java +++ /dev/null @@ -1,5 +0,0 @@ -package com.dwolla.lambda; - -public class ConstructorTest { - public static TestInstance instance = new TestInstance(); -} diff --git a/lambda-io-app/jvm/src/test/resources/log4j2.xml b/lambda-io-app/jvm/src/test/resources/log4j2.xml deleted file mode 100644 index b197b5aa..00000000 --- a/lambda-io-app/jvm/src/test/resources/log4j2.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - %d{yyyy-MM-dd HH:mm:ss} %X{AWSRequestId} %-5p %c{1}:%L - %m%n - - - - - - - - - diff --git a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithInputAndOutputSpec.scala b/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithInputAndOutputSpec.scala deleted file mode 100644 index 0360d883..00000000 --- a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithInputAndOutputSpec.scala +++ /dev/null @@ -1,42 +0,0 @@ -package com.dwolla.lambda - -import java.io._ - -import cats.effect._ -import cats.implicits._ -import com.amazonaws.services.lambda.runtime.Context -import org.typelevel.log4cats.Logger -import natchez._ -import org.specs2.mutable.Specification -import io.circe.literal._ -import io.circe.parser._ - -class LambdaWithInputAndOutputSpec extends Specification { - "Json-based instance" should { - "read and write JSON input and output" in { - val inputStream = new ByteArrayInputStream( - json"""{ - "foo": "Hello world" - }""".noSpaces.getBytes()) - - val outputStream = new ByteArrayOutputStream() - - val output = new TestInstance().handleRequest(inputStream, outputStream, null) - - output must be_==(()) - - val writtenOutput = parse(new String(outputStream.toByteArray)).flatMap(_.as[Output]) - writtenOutput must beRight(Output("Hello world")) - } - } -} - -private[lambda] class TestInstance extends IOLambda[Input, Output] { - override def handleRequestF[F[_] : Concurrent : ContextShift : Logger : Timer : Trace](blocker: Blocker) - (s: Input, context: Context): F[LambdaResponse[Output]] = - for { - _ <- Trace[F].span(s.foo) { - ().pure[F] - } - } yield Output(s.foo) -} diff --git a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithInputButNoOutputSpec.scala b/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithInputButNoOutputSpec.scala deleted file mode 100644 index c6060479..00000000 --- a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithInputButNoOutputSpec.scala +++ /dev/null @@ -1,42 +0,0 @@ -package com.dwolla.lambda - -import java.io._ - -import cats.effect._ -import cats.implicits._ -import com.amazonaws.services.lambda.runtime.Context -import org.typelevel.log4cats.Logger -import natchez._ -import natchez.Trace.kleisliInstance -import org.specs2.mutable.Specification -import io.circe.literal._ - -class LambdaWithInputButNoOutputSpec extends Specification { - "Json-based instance" should { - "read JSON input" in { - val inputStream = new ByteArrayInputStream( - json"""{ - "foo": "Hello world" - }""".noSpaces.getBytes()) - - val outputStream = new OutputStream { - override def write(b: Int): Unit = throw IntentionallyThrownException - } - - val output = new InputWithoutOutputInstance().handleRequest(inputStream, outputStream, null) - - output must be_==(()) - } - } -} - -private[lambda] class InputWithoutOutputInstance extends IOLambda[Input, Unit] { - 
override def handleRequestF[F[_] : Concurrent : ContextShift : Logger : Timer : Trace](blocker: Blocker) - (s: Input, context: Context): F[LambdaResponse[Unit]] = - for { - _ <- Trace[F].span(s.foo) { - if (s.foo == "Hello world") ().pure[F] - else IntentionallyThrownException.raiseError[F, Unit] - } - } yield () -} diff --git a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithOutputButNoInputSpec.scala b/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithOutputButNoInputSpec.scala deleted file mode 100644 index 93ad4838..00000000 --- a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithOutputButNoInputSpec.scala +++ /dev/null @@ -1,39 +0,0 @@ -package com.dwolla.lambda - -import java.io._ - -import cats.effect._ -import cats.implicits._ -import com.amazonaws.services.lambda.runtime.Context -import org.typelevel.log4cats.Logger -import natchez._ -import org.specs2.mutable.Specification -import io.circe.parser._ - -class LambdaWithOutputButNoInputSpec extends Specification { - "Json-based instance" should { - "JSON output" in { - val explodingInputStream = new InputStream { - override def read(): Int = throw IntentionallyThrownException - } - val outputStream = new ByteArrayOutputStream() - - val output = new LambdaWithOutputButNoInput().handleRequest(explodingInputStream, outputStream, null) - - output must be_==(()) - - val writtenOutput = parse(new String(outputStream.toByteArray)).flatMap(_.as[Output]) - writtenOutput must beRight(Output("Hello world")) - } - } -} - -private[lambda] class LambdaWithOutputButNoInput extends IOLambda[Unit, Output] { - override def handleRequestF[F[_] : Concurrent : ContextShift : Logger : Timer : Trace](blocker: Blocker) - (s: Unit, context: Context): F[LambdaResponse[Output]] = - for { - _ <- Trace[F].span("LambdaWithOutputButNoInput") { - s.pure[F] - } - } yield Output("Hello world") -} diff --git a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithoutIOSpec.scala b/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithoutIOSpec.scala deleted file mode 100644 index 8d08b3ba..00000000 --- a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/LambdaWithoutIOSpec.scala +++ /dev/null @@ -1,49 +0,0 @@ -package com.dwolla.lambda - -import java.io._ - -import cats.effect._ -import cats.implicits._ -import com.amazonaws.services.lambda.runtime.Context -import org.typelevel.log4cats.Logger -import natchez._ -import org.specs2.mutable.Specification - -class NoInputInstanceSpec extends Specification { - - "NoIO instance" should { - "not read from input" in { - val explodingInputStream = new InputStream { - override def read(): Int = throw IntentionallyThrownException - } - - val outputStream = new ByteArrayOutputStream() - - val context: Context = null - - val output = new NoIOInstance().handleRequest(explodingInputStream, outputStream, context) - - output must be_==(()) - } - - "not write to output" in { - val explodingInputStream = new ByteArrayInputStream(Array.empty) - - val outputStream = new OutputStream { - override def write(b: Int): Unit = throw IntentionallyThrownException - } - - val context: Context = null - - val output = new NoIOInstance().handleRequest(explodingInputStream, outputStream, context) - - output must be_==(()) - } - } -} - -private[lambda] class NoIOInstance extends IOLambda[Unit, Unit] { - override def handleRequestF[F[_] : Concurrent : ContextShift : Logger : Timer : Trace](blocker: Blocker) - (req: Unit, context: Context): F[LambdaResponse[Unit]] = - NoResponse.pure[F].widen -} diff 
--git a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/ReadCirceDecodablesSpec.scala b/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/ReadCirceDecodablesSpec.scala deleted file mode 100644 index afeb1f22..00000000 --- a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/ReadCirceDecodablesSpec.scala +++ /dev/null @@ -1,63 +0,0 @@ -package com.dwolla.lambda - -import java.io._ - -import cats._ -import cats.data._ -import cats.effect._ -import cats.implicits._ -import com.dwolla.lambda.LambdaReader._ -import org.typelevel.log4cats.Logger -import io.circe._ -import io.circe.generic.semiauto._ -import io.circe.syntax._ -import natchez.Trace.Implicits._ -import org.scalacheck._ -import org.specs2._ -import org.specs2.concurrent.ExecutionEnv -import org.specs2.matcher._ -import org.specs2.mutable.Specification - -class ReadCirceDecodablesSpec(implicit ee: ExecutionEnv) extends Specification with Matchers with ScalaCheck { - implicit val contextShift: ContextShift[IO] = IO.contextShift(ee.ec) - - "readCirceDecodables" should { - "read and parse input larger than the chunk size" >> prop { foo: Foo => - (for { - is <- IO(new ByteArrayInputStream(foo.asJson.noSpaces.getBytes())) - output <- Blocker[IO] - .map(new LambdaReaderEnvironment[IO](_, contextShift, new NoOpLogger[IO])) - .use(readCirceDecodables[IO, Foo].read(Kleisli.pure(is)).run(_)) - } yield (output must_== foo)) - .unsafeRunSync() - } - } -} - -case class Foo(bar: String) -object Foo { - implicit val fooCodec: Codec[Foo] = deriveCodec - - private val bufferSizeMinusFooJsonPadding: Int = 4096 - Foo("").asJson.noSpaces.length + 1 - private val oneMegabyte: Int = 1048576 - - implicit val fooGen: Arbitrary[Foo] = Arbitrary( - for { - size <- Gen.chooseNum(bufferSizeMinusFooJsonPadding, oneMegabyte) - charList <- Gen.listOfN(size, Gen.alphaNumChar) - } yield Foo(charList.mkString) - ) -} - -class NoOpLogger[F[_] : Applicative] extends Logger[F] { - override def error(message: => String): F[Unit] = ().pure[F] - override def warn(message: =>String): F[Unit] = ().pure[F] - override def info(message: =>String): F[Unit] = ().pure[F] - override def debug(message: =>String): F[Unit] = ().pure[F] - override def trace(message: =>String): F[Unit] = ().pure[F] - override def error(t: Throwable)(message: =>String): F[Unit] = ().pure[F] - override def warn(t: Throwable)(message: =>String): F[Unit] = ().pure[F] - override def info(t: Throwable)(message: =>String): F[Unit] = ().pure[F] - override def debug(t: Throwable)(message: =>String): F[Unit] = ().pure[F] - override def trace(t: Throwable)(message: =>String): F[Unit] = ().pure[F] -} diff --git a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/test-model.scala b/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/test-model.scala deleted file mode 100644 index 65001a63..00000000 --- a/lambda-io-app/jvm/src/test/scala/com/dwolla/lambda/test-model.scala +++ /dev/null @@ -1,15 +0,0 @@ -package com.dwolla.lambda - -case object IntentionallyThrownException extends RuntimeException("Boom!", null, true, false) - -private[lambda] case class Input(foo: String) - -object Input { - implicit val inputCodec: io.circe.Codec[Input] = io.circe.generic.semiauto.deriveCodec -} - -private[lambda] case class Output(foo: String) - -object Output { - implicit val outputCodec: io.circe.Codec[Output] = io.circe.generic.semiauto.deriveCodec -} diff --git a/lambda-io-app/shared/src/main/scala/com/dwolla/lambda/package.scala b/lambda-io-app/shared/src/main/scala/com/dwolla/lambda/package.scala deleted file mode 100644 
index df241c86..00000000 --- a/lambda-io-app/shared/src/main/scala/com/dwolla/lambda/package.scala +++ /dev/null @@ -1,30 +0,0 @@ -package com.dwolla - -import cats._ -import cats.data._ -import cats.implicits._ -import io.circe._ - -package object lambda extends LowPriorityLambdaResponseImplicits { - implicit class LeftSemiflatTap[F[_], A, B](private val eitherT: EitherT[F, A, B]) extends AnyVal { - def leftSemiflatTap[C](f: A => F[C])(implicit M: Monad[F]): EitherT[F, A, B] = - eitherT.leftSemiflatMap(x => f(x) map (_ => x)) - } - - implicit val unitToResponseWrapper: Unit => LambdaResponse[Nothing] = _ => NoResponse - implicit def fUnitToResponseWrapper[F[_] : Functor]: F[Unit] => F[LambdaResponse[Nothing]] = _.map(unitToResponseWrapper) -} - -package lambda { - trait LowPriorityLambdaResponseImplicits { - implicit def encodableToResponseWrapper[T: Encoder](t: T): LambdaResponse[T] = - ResponseWrapper(t, Encoder[T]) - - implicit def encodableFToResponseWrapper[F[_] : Functor, T: Encoder](ft: F[T]): F[LambdaResponse[T]] = - ft.map(ResponseWrapper(_, Encoder[T])) - } - - sealed trait LambdaResponse[+T] - case object NoResponse extends LambdaResponse[Nothing] - case class ResponseWrapper[T](response: T, encoder: Encoder[T]) extends LambdaResponse[T] -} diff --git a/main/src/main/scala/com/dwolla/fs2aws/ExecuteVia.scala b/main/src/main/scala/com/dwolla/fs2aws/ExecuteVia.scala deleted file mode 100644 index 39d5227e..00000000 --- a/main/src/main/scala/com/dwolla/fs2aws/ExecuteVia.scala +++ /dev/null @@ -1,16 +0,0 @@ -package com.dwolla.fs2aws - -import cats.effect._ -import com.amazonaws.AmazonWebServiceRequest -import com.amazonaws.handlers.AsyncHandler - -class ExecuteVia[F[_], Req <: AmazonWebServiceRequest](val req: Req) extends AnyVal { - def apply[Res](awsAsyncFunction: AwsAsyncFunction[Req, Res])(implicit F: Async[F]): F[Res] = F.async[Res] { callback => - awsAsyncFunction(req, new AsyncHandler[Req, Res] { - override def onError(exception: Exception): Unit = callback(Left(exception)) - override def onSuccess(request: Req, result: Res): Unit = callback(Right(result)) - }) - - () - } -} diff --git a/main/src/main/scala/com/dwolla/fs2aws/FetchAll.scala b/main/src/main/scala/com/dwolla/fs2aws/FetchAll.scala deleted file mode 100644 index 410138c5..00000000 --- a/main/src/main/scala/com/dwolla/fs2aws/FetchAll.scala +++ /dev/null @@ -1,11 +0,0 @@ -package com.dwolla.fs2aws - -import cats.effect._ -import fs2.Stream - -class FetchAll[F[_], Req <: PaginatedRequest](val requestFactory: () => Req) extends AnyVal { - def apply[Res <: PaginatedResult, T](awsAsyncFunction: AwsAsyncFunction[Req, Res]) - (extractor: Res => Seq[T]) - (implicit F: Effect[F]): Stream[F, T] = - new PaginatedAwsClient(requestFactory).via(awsAsyncFunction)(extractor) -} diff --git a/main/src/main/scala/com/dwolla/fs2aws/PaginatedAwsClient.scala b/main/src/main/scala/com/dwolla/fs2aws/PaginatedAwsClient.scala deleted file mode 100644 index ec839997..00000000 --- a/main/src/main/scala/com/dwolla/fs2aws/PaginatedAwsClient.scala +++ /dev/null @@ -1,21 +0,0 @@ -package com.dwolla.fs2aws - -import cats.effect.Effect -import cats.implicits._ -import com.dwolla.fs2utils.Pagination -import fs2._ - -import scala.language.reflectiveCalls - -class PaginatedAwsClient[F[_] : Effect, Req <: PaginatedRequest, Res <: PaginatedResult, T](requestFactory: () => Req) { - def via(awsAsyncFunction: AwsAsyncFunction[Req, Res])(extractor: Res => Seq[T]): Stream[F, T] = { - val fetchPage = (maybeNextToken: Option[String]) => { - val req = 
requestFactory() - maybeNextToken.foreach(req.setNextToken) - - req.executeVia[F](awsAsyncFunction).map((res: Res) => (Chunk.seq(extractor(res)), Option(res.getNextToken()))) - } - - Pagination.offsetUnfoldChunkEval(fetchPage) - } -} diff --git a/main/src/main/scala/com/dwolla/fs2aws/cloudformation/CloudFormationClient.scala b/main/src/main/scala/com/dwolla/fs2aws/cloudformation/CloudFormationClient.scala deleted file mode 100644 index e43aa8db..00000000 --- a/main/src/main/scala/com/dwolla/fs2aws/cloudformation/CloudFormationClient.scala +++ /dev/null @@ -1,132 +0,0 @@ -package com.dwolla.fs2aws.cloudformation - -import cats.effect._ -import cats.implicits._ -import com.amazonaws.AmazonWebServiceRequest -import com.amazonaws.regions.Regions -import com.amazonaws.services.cloudformation.model.Capability.CAPABILITY_IAM -import com.amazonaws.services.cloudformation.model.ChangeSetType._ -import com.amazonaws.services.cloudformation.model.StackStatus._ -import com.amazonaws.services.cloudformation.model.{Parameter => AwsParameter, _} -import com.amazonaws.services.cloudformation._ -import com.dwolla.fs2aws._ -import com.dwolla.fs2aws.cloudformation.CloudFormationClient._ -import com.dwolla.fs2aws.cloudformation.Implicits._ - -import scala.jdk.CollectionConverters._ -import scala.language.reflectiveCalls - -trait CloudFormationClient[F[_]] { - def createOrUpdateTemplate(stackName: String, - template: String, - params: List[(String, String)] = List.empty[(String, String)], - roleArn: Option[String] = None, - changeSetName: Option[String] = None): F[StackID] -} - -class CloudFormationClientImpl[F[_] : Effect](client: AmazonCloudFormationAsync) extends CloudFormationClient[F] { - override def createOrUpdateTemplate(stackName: String, - template: String, - params: List[(String, String)], - roleArn: Option[String], - changeSetName: Option[String]): F[StackID] = - for { - maybeStack <- getStackByName(stackName) - stackOperation = maybeStack.fold(buildStackOperation(createStack, CREATE)) { - case stack if updatableStackStatuses.contains(stackStatus(stack.getStackStatus)) => buildStackOperation(updateStack, UPDATE) - case stack => (_, _) => Sync[F].raiseError(StackNotUpdatableException(stack.getStackName, stack.getStackStatus)) - } - res <- stackOperation(StackDetails(stackName, template, params, roleArn), changeSetName) - } yield res - - private def buildStackOperation[T](func: T => F[StackID], changeSetType: ChangeSetType) - (implicit ev1: StackDetails => T): (StackDetails, Option[String]) => F[StackID] = - (stackDetails, changeSetName) => changeSetName.fold(func(stackDetails))(createChangeSet(_, stackDetails.withChangeSetType(changeSetType))) - - private def getStackByName(name: String): F[Option[Stack]] = (() => new DescribeStacksRequest()).fetchAll(client.describeStacksAsync)(_.getStacks.asScala.toSeq) - .filter(s => s.getStackName == name && StackStatus.valueOf(s.getStackStatus) != DELETE_COMPLETE) - .compile - .last - - private def createStack(createStackRequest: CreateStackRequest): F[StackID] = makeRequestAndExtractStackId(createStackRequest, client.createStackAsync) - - private def updateStack(updateStackRequest: UpdateStackRequest): F[StackID] = makeRequestAndExtractStackId(updateStackRequest, client.updateStackAsync) - - private def createChangeSet(changeSetName: String, createChangeSetRequest: CreateChangeSetRequest): F[StackID] = - makeRequestAndExtractStackId(createChangeSetRequest.withChangeSetName(changeSetName), client.createChangeSetAsync) - - //noinspection 
AccessorLikeMethodIsEmptyParen - private def makeRequestAndExtractStackId[Req <: AmazonWebServiceRequest, Res <: {def getStackId() : StackID}](req: Req, func: AwsAsyncFunction[Req, Res]): F[StackID] = - req.executeVia(func).map(_.getStackId()) - -} - -object CloudFormationClient { - def apply[F[_] : Effect]: CloudFormationClient[F] = new CloudFormationClientImpl[F](clientForRegion(None)) - - def apply[F[_] : Effect](r: String): CloudFormationClient[F] = apply[F](Regions.fromName(r)) - - def apply[F[_] : Effect](r: Regions): CloudFormationClient[F] = new CloudFormationClientImpl[F](clientForRegion(Option(r))) - - val updatableStackStatuses: Seq[StackStatus] = Seq( - CREATE_COMPLETE, - ROLLBACK_COMPLETE, - UPDATE_COMPLETE, - UPDATE_ROLLBACK_COMPLETE - ) - - private def clientForRegion(r: Option[Regions]) = r.fold(AmazonCloudFormationAsyncClientBuilder.defaultClient()) { providedRegion => - AmazonCloudFormationAsyncClientBuilder.standard().withRegion(providedRegion).build() - } -} - -case class StackDetails(name: String, template: String, parameters: List[AwsParameter], roleArn: Option[String] = None) - -trait Builder[StackRequest] { - def withStackName(name: String): StackRequest - def withTemplateBody(name: String): StackRequest - def withParameters(params: List[AwsParameter]): StackRequest - def withCapabilities(capabilities: Capability*): StackRequest - def withRoleArn(roleArn: String): StackRequest -} - -object Implicits { - implicit class CreateStackRequestToBuilder(s: CreateStackRequest) extends Builder[CreateStackRequest] { - override def withStackName(name: String): CreateStackRequest = s.withStackName(name) - override def withTemplateBody(name: String): CreateStackRequest = s.withTemplateBody(name) - override def withParameters(params: List[AwsParameter]): CreateStackRequest = s.withParameters(params.asJavaCollection) - override def withCapabilities(capabilities: Capability*): CreateStackRequest = s.withCapabilities(capabilities: _*) - override def withRoleArn(roleArn: String): CreateStackRequest = s.withRoleARN(roleArn) - } - implicit class CreateChangeSetRequestToBuilder(s: CreateChangeSetRequest) extends Builder[CreateChangeSetRequest] { - override def withStackName(name: String): CreateChangeSetRequest = s.withStackName(name) - override def withTemplateBody(name: String): CreateChangeSetRequest = s.withTemplateBody(name) - override def withParameters(params: List[AwsParameter]): CreateChangeSetRequest = s.withParameters(params.asJavaCollection) - override def withCapabilities(capabilities: Capability*): CreateChangeSetRequest = s.withCapabilities(capabilities: _*) - override def withRoleArn(roleArn: String): CreateChangeSetRequest = s.withRoleARN(roleArn) - } - implicit class UpdateStackRequestToBuilder(s: UpdateStackRequest) extends Builder[UpdateStackRequest] { - override def withStackName(name: String): UpdateStackRequest = s.withStackName(name) - override def withTemplateBody(name: String): UpdateStackRequest = s.withTemplateBody(name) - override def withParameters(params: List[AwsParameter]): UpdateStackRequest = s.withParameters(params.asJavaCollection) - override def withCapabilities(capabilities: Capability*): UpdateStackRequest = s.withCapabilities(capabilities: _*) - override def withRoleArn(roleArn: String): UpdateStackRequest = s.withRoleARN(roleArn) - } - - implicit def potentialStackToCreateRequest(ps: StackDetails): CreateStackRequest = populate(ps, new CreateStackRequest) - implicit def potentialStackToUpdateRequest(ps: StackDetails): UpdateStackRequest = 
populate(ps, new UpdateStackRequest)
-  implicit def potentialStackToCreateChangeSetRequest(ps: StackDetails): CreateChangeSetRequest = populate(ps, new CreateChangeSetRequest)
-  implicit def tuplesToParams(tuples: List[(String, String)]): List[AwsParameter] = tuples.map {
-    case (key, value) => new AwsParameter().withParameterKey(key).withParameterValue(value)
-  }
-  implicit def stackStatus(status: String): StackStatus = StackStatus.valueOf(status)
-
-  private def populate[T](ps: StackDetails, builder: Builder[T])(implicit ev: T => Builder[T]): T = {
-    val t = builder.withStackName(ps.name)
-      .withTemplateBody(ps.template)
-      .withParameters(ps.parameters)
-      .withCapabilities(CAPABILITY_IAM)
-
-    ps.roleArn.fold(t)(t.withRoleArn)
-  }
-}
diff --git a/main/src/main/scala/com/dwolla/fs2aws/cloudformation/StackNotUpdatableException.scala b/main/src/main/scala/com/dwolla/fs2aws/cloudformation/StackNotUpdatableException.scala
deleted file mode 100644
index a01a6e33..00000000
--- a/main/src/main/scala/com/dwolla/fs2aws/cloudformation/StackNotUpdatableException.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package com.dwolla.fs2aws.cloudformation
-
-import com.amazonaws.services.cloudformation.model.StackStatus
-
-case class StackNotUpdatableException(private val name: String,
-                                      private val status: StackStatus) extends RuntimeException(s"Stack $name is in status $status, which cannot be updated.")
diff --git a/main/src/main/scala/com/dwolla/fs2aws/cloudformation/package.scala b/main/src/main/scala/com/dwolla/fs2aws/cloudformation/package.scala
deleted file mode 100644
index 39528460..00000000
--- a/main/src/main/scala/com/dwolla/fs2aws/cloudformation/package.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.dwolla.fs2aws
-
-package object cloudformation {
-  type StackID = String
-}
diff --git a/main/src/main/scala/com/dwolla/fs2aws/kms/KmsDecrypter.scala b/main/src/main/scala/com/dwolla/fs2aws/kms/KmsDecrypter.scala
deleted file mode 100644
index 161415f4..00000000
--- a/main/src/main/scala/com/dwolla/fs2aws/kms/KmsDecrypter.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-package com.dwolla.fs2aws.kms
-
-import java.nio.ByteBuffer
-
-import cats.data.Kleisli
-import cats.effect._
-import cats.implicits._
-import com.amazonaws.regions.Regions
-import com.amazonaws.regions.Regions.US_WEST_2
-import com.amazonaws.services.kms._
-import com.amazonaws.services.kms.model._
-import com.dwolla.fs2aws._
-import fs2._
-
-trait KmsDecrypter[F[_]] {
-  def decrypt[A](transformer: Transform[A], cryptoText: A): F[Array[Byte]]
-  def decrypt[A](transform: Transform[A], cryptoTexts: (String, A)*): Stream[F, Map[String, Array[Byte]]]
-  def decryptBase64(cryptoTexts: (String, String)*): Stream[F, Map[String, Array[Byte]]]
-}
-
-class KmsDecrypterImpl[F[_] : Concurrent](asyncClient: AWSKMSAsync) extends KmsDecrypter[F] {
-  def decrypt[A](transformer: Transform[A], cryptoText: A): F[Array[Byte]] =
-    decrypt[A]((transformer, cryptoText))
-
-  def decrypt[A](transform: Transform[A], cryptoTexts: (String, A)*): Stream[F, Map[String, Array[Byte]]] =
-    Stream.emits(cryptoTexts)
-      .map {
-        case (name, cryptoText) => decrypt(transform, cryptoText).map(name -> _)
-      }
-      .map(Stream.eval)
-      .parJoin(10)
-      .fold(Map.empty[String, Array[Byte]]) {
-        case (map, tuple) => map + tuple
-      }
-
-  def decryptBase64(cryptoTexts: (String, String)*): Stream[F, Map[String, Array[Byte]]] =
-    decrypt(base64DecodingTransform, cryptoTexts: _*)
-
-  private def buildDecryptionRequest[A]: Kleisli[F, (Transform[A], A), DecryptRequest] =
-    Kleisli { case (transformer, cryptoText) =>
-      Sync[F].delay(new DecryptRequest().withCiphertextBlob(ByteBuffer.wrap(transformer(cryptoText))))
-    }
-
-  private def executeDecryptionRequest: Kleisli[F, DecryptRequest, Array[Byte]] = Kleisli {
-    _.executeVia[F](asyncClient.decryptAsync)
-      .map(_.getPlaintext.array())
-  }
-
-  private def decrypt[A]: Kleisli[F, (Transform[A], A), Array[Byte]] =
-    buildDecryptionRequest[A] andThen executeDecryptionRequest
-}
-
-object KmsDecrypter {
-  private def acquireKmsClient[F[_] : Sync](region: Regions): F[AWSKMSAsync] =
-    Sync[F].delay(AWSKMSAsyncClientBuilder.standard().withRegion(region).build())
-
-  private def shutdownKmsClient[F[_] : Sync](client: AWSKMSAsync): F[Unit] =
-    Sync[F].delay(client.shutdown())
-
-  def resource[F[_] : Concurrent](region: Regions = US_WEST_2): Resource[F, KmsDecrypter[F]] =
-    Resource
-      .make(acquireKmsClient(region))(shutdownKmsClient[F])
-      .map(new KmsDecrypterImpl[F](_))
-
-  def stream[F[_] : Concurrent](region: Regions = US_WEST_2): Stream[F, KmsDecrypter[F]] =
-    Stream.resource(KmsDecrypter.resource(region))
-}
diff --git a/main/src/main/scala/com/dwolla/fs2aws/kms/package.scala b/main/src/main/scala/com/dwolla/fs2aws/kms/package.scala
deleted file mode 100644
index f2845929..00000000
--- a/main/src/main/scala/com/dwolla/fs2aws/kms/package.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package com.dwolla.fs2aws
-
-import java.util.Base64
-
-package object kms {
-  type Transform[A] = A => Array[Byte]
-
-  val noopTransform: Transform[Array[Byte]] = identity
-  private val decoder: Base64.Decoder = Base64.getDecoder
-  val base64DecodingTransform: Transform[String] = decoder.decode
-}
diff --git a/main/src/main/scala/com/dwolla/fs2aws/package.scala b/main/src/main/scala/com/dwolla/fs2aws/package.scala
deleted file mode 100644
index bfb92849..00000000
--- a/main/src/main/scala/com/dwolla/fs2aws/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.dwolla
-
-import java.util.concurrent.{Future => JFuture}
-
-import com.amazonaws.AmazonWebServiceRequest
-import com.amazonaws.handlers.AsyncHandler
-
-package object fs2aws {
-  //noinspection ScalaUnusedSymbol
-  type PaginatedRequest = AmazonWebServiceRequest {def setNextToken(s: String): Unit}
-  //noinspection AccessorLikeMethodIsEmptyParen,ScalaUnusedSymbol
-  type PaginatedResult = {def getNextToken(): String}
-  type AwsAsyncFunction[Req <: AmazonWebServiceRequest, Res] = (Req, AsyncHandler[Req, Res]) => JFuture[Res]
-
-  implicit class ExecuteViaOps[Req <: AmazonWebServiceRequest](val req: Req) extends AnyVal {
-    def executeVia[F[_]] = new ExecuteVia[F, Req](req)
-  }
-
-  implicit class FetchAllOps[Req <: PaginatedRequest](val requestFactory: () => Req) {
-    def fetchAll[F[_]] = new FetchAll[F, Req](requestFactory)
-  }
-
-}
diff --git a/main/src/main/scala/com/dwolla/fs2aws/s3/S3Client.scala b/main/src/main/scala/com/dwolla/fs2aws/s3/S3Client.scala
deleted file mode 100644
index 6c95be35..00000000
--- a/main/src/main/scala/com/dwolla/fs2aws/s3/S3Client.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-package com.dwolla.fs2aws.s3
-
-import cats.effect._
-import cats.implicits._
-import com.amazonaws.event.ProgressEvent
-import com.amazonaws.event.ProgressEventType._
-import com.amazonaws.services.s3.model.{Bucket => _, _}
-import com.amazonaws.services.s3.transfer._
-import com.amazonaws.services.s3.transfer.internal._
-import com.dwolla.fs2utils.Pagination
-import fs2.{io, _}
-
-import scala.jdk.CollectionConverters._
-
-trait S3Client[F[_]] {
-  val blocker: Blocker
-  def listBucket(bucket: Bucket): Stream[F, Key]
-  def uploadSink(bucket: Bucket, key: Key, objectMetadata: ObjectMetadata): Pipe[F, Byte, Unit]
-  def downloadObject(bucket: Bucket, key: Key): Stream[F, Byte]
-  def deleteObject(bucket: Bucket, key: Key): Stream[F, Unit]
-}
-
-object S3Client {
-  def resource[F[_] : ConcurrentEffect : ContextShift]: Resource[F, S3Client[F]] =
-    resource[F](None)
-
-  def resource[F[_] : ConcurrentEffect : ContextShift](blocker: Option[Blocker] = None): Resource[F, S3Client[F]] =
-    for {
-      blocker <- blocker.fold(Blocker[F])(Resource.pure[F, Blocker](_))
-      transferManager <- Resource.make(acquireTransferManager[F])(shutdown[F])
-    } yield new S3ClientImpl[F](transferManager, blocker)
-
-  def stream[F[_] : ConcurrentEffect : ContextShift]: Stream[F, S3Client[F]] =
-    Stream.resource(S3Client.resource[F])
-
-  private def acquireTransferManager[F[_] : Sync]: F[TransferManager] =
-    Sync[F].delay(TransferManagerBuilder.defaultTransferManager())
-
-  private def shutdown[F[_] : Sync](tm: TransferManager): F[Unit] =
-    Sync[F].delay(tm.shutdownNow())
-
-  class S3ClientImpl[F[_] : ConcurrentEffect : ContextShift] private[s3](transferManager: TransferManager, val blocker: Blocker) extends S3Client[F] {
-    override def uploadSink(bucket: Bucket,
-                            key: Key,
-                            objectMetadata: ObjectMetadata
-                           ): Pipe[F, Byte, Unit] = (s: Stream[F, Byte]) =>
-      for {
-        is <- s.through(io.toInputStream)
-        uploadRequest = new PutObjectRequest(bucket, key, is, objectMetadata)
-        _ <- upload(uploadRequest)
-      } yield ()
-
-    override def downloadObject(bucket: Bucket, key: Key): Stream[F, Byte] =
-      io.readInputStream(Sync[F].delay(transferManager.getAmazonS3Client.getObject(bucket, key).getObjectContent), chunkSize = 128, blocker)
-
-    override def deleteObject(bucket: Bucket, key: Key): Stream[F, Unit] =
-      Stream.eval(Sync[F].delay(transferManager.getAmazonS3Client.deleteObject(bucket, key)))
-
-    private def upload(req: PutObjectRequest): Stream[F, Unit] =
-      Stream.eval(Async[F].async[Unit] { cb =>
-        transferManager.upload(req, new S3ProgressListener() {
-          private def success(): Unit = cb(Right(()))
-
-          private def failure(ex: Exception): Unit = cb(Left(ex))
-
-          override def onPersistableTransfer(pt: PersistableTransfer): Unit = ()
-
-          override def progressChanged(event: ProgressEvent): Unit =
-            event.getEventType match {
-              case TRANSFER_COMPLETED_EVENT => success()
-              case TRANSFER_FAILED_EVENT => failure(TransferFailedEventException)
-              case TRANSFER_CANCELED_EVENT => failure(TransferCanceledEventException)
-              case _ => ()
-            }
-        })
-
-        ()
-      })
-
-    override def listBucket(bucket: Bucket): Stream[F, Key] =
-      Pagination.offsetUnfoldChunkEval[F, String, Key] { maybeMarker: Option[String] =>
-        val request = new ListObjectsRequest().withBucketName(bucket)
-        maybeMarker.foreach(request.setMarker)
-
-        for {
-          res <- Sync[F].delay(transferManager.getAmazonS3Client.listObjects(request))
-        } yield {
-          val resultChunk = Chunk.seq(res.getObjectSummaries.asScala).map(_.getKey).map(tagKey)
-          val maybeNextMarker = Option(res.getNextMarker)
-
-          (resultChunk, maybeNextMarker)
-        }
-      }
-  }
-
-  case object TransferFailedEventException extends RuntimeException("S3 transfer failed")
-  case object TransferCanceledEventException extends RuntimeException("S3 transfer canceled")
-}
diff --git a/main/src/main/scala/com/dwolla/fs2aws/s3/package.scala b/main/src/main/scala/com/dwolla/fs2aws/s3/package.scala
deleted file mode 100644
index 5621ea2f..00000000
--- a/main/src/main/scala/com/dwolla/fs2aws/s3/package.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.dwolla.fs2aws
-
-import shapeless.tag
-import shapeless.tag._
-
-package s3 {
-  trait BucketTag
-  trait PrefixTag
-  trait KeyTag
-}
-
-package object s3 {
-  type Bucket = String @@ BucketTag
-  type Prefix = String @@ PrefixTag
-  type Key = String @@ KeyTag
-
-  val tagBucket: String => Bucket = tag[BucketTag][String]
-  val tagPrefix: String => Prefix = tag[PrefixTag][String]
-  val tagKey: String => Key = tag[KeyTag][String]
-
-  implicit class EnhancedPrefix(val prefix: Prefix) extends AnyVal {
-    def /(id: String): Key = tagKey(s"$prefix/$id")
-  }
-}
diff --git a/main/src/test/scala/com/dwolla/fs2aws/FakeAmazonCloudFormationAsyncClient.scala b/main/src/test/scala/com/dwolla/fs2aws/FakeAmazonCloudFormationAsyncClient.scala
deleted file mode 100644
index f65b5844..00000000
--- a/main/src/test/scala/com/dwolla/fs2aws/FakeAmazonCloudFormationAsyncClient.scala
+++ /dev/null
@@ -1,437 +0,0 @@
-package com.dwolla.fs2aws
-
-import com.amazonaws.{AmazonWebServiceRequest, ResponseMetadata}
-import com.amazonaws.handlers.AsyncHandler
-import com.amazonaws.regions.Region
-import com.amazonaws.services.cloudformation.AmazonCloudFormationAsync
-import com.amazonaws.services.cloudformation.model._
-import com.amazonaws.services.cloudformation.waiters.AmazonCloudFormationWaiters
-
-import java.util.concurrent.Future
-
-//noinspection NotImplementedCode,ScalaDeprecation
-class FakeAmazonCloudFormationAsyncClient extends AmazonCloudFormationAsync {
-  override def activateTypeAsync(activateTypeRequest: ActivateTypeRequest): Future[ActivateTypeResult] = ???
-
-  override def activateTypeAsync(activateTypeRequest: ActivateTypeRequest, asyncHandler: AsyncHandler[ActivateTypeRequest, ActivateTypeResult]): Future[ActivateTypeResult] = ???
-
-  override def batchDescribeTypeConfigurationsAsync(batchDescribeTypeConfigurationsRequest: BatchDescribeTypeConfigurationsRequest): Future[BatchDescribeTypeConfigurationsResult] = ???
-
-  override def batchDescribeTypeConfigurationsAsync(batchDescribeTypeConfigurationsRequest: BatchDescribeTypeConfigurationsRequest, asyncHandler: AsyncHandler[BatchDescribeTypeConfigurationsRequest, BatchDescribeTypeConfigurationsResult]): Future[BatchDescribeTypeConfigurationsResult] = ???
-
-  override def deactivateTypeAsync(deactivateTypeRequest: DeactivateTypeRequest): Future[DeactivateTypeResult] = ???
-
-  override def deactivateTypeAsync(deactivateTypeRequest: DeactivateTypeRequest, asyncHandler: AsyncHandler[DeactivateTypeRequest, DeactivateTypeResult]): Future[DeactivateTypeResult] = ???
-
-  override def describePublisherAsync(describePublisherRequest: DescribePublisherRequest): Future[DescribePublisherResult] = ???
-
-  override def describePublisherAsync(describePublisherRequest: DescribePublisherRequest, asyncHandler: AsyncHandler[DescribePublisherRequest, DescribePublisherResult]): Future[DescribePublisherResult] = ???
-
-  override def importStacksToStackSetAsync(importStacksToStackSetRequest: ImportStacksToStackSetRequest): Future[ImportStacksToStackSetResult] = ???
-
-  override def importStacksToStackSetAsync(importStacksToStackSetRequest: ImportStacksToStackSetRequest, asyncHandler: AsyncHandler[ImportStacksToStackSetRequest, ImportStacksToStackSetResult]): Future[ImportStacksToStackSetResult] = ???
-
-  override def publishTypeAsync(publishTypeRequest: PublishTypeRequest): Future[PublishTypeResult] = ???
-
-  override def publishTypeAsync(publishTypeRequest: PublishTypeRequest, asyncHandler: AsyncHandler[PublishTypeRequest, PublishTypeResult]): Future[PublishTypeResult] = ???
- - override def registerPublisherAsync(registerPublisherRequest: RegisterPublisherRequest): Future[RegisterPublisherResult] = ??? - - override def registerPublisherAsync(registerPublisherRequest: RegisterPublisherRequest, asyncHandler: AsyncHandler[RegisterPublisherRequest, RegisterPublisherResult]): Future[RegisterPublisherResult] = ??? - - override def rollbackStackAsync(rollbackStackRequest: RollbackStackRequest): Future[RollbackStackResult] = ??? - - override def rollbackStackAsync(rollbackStackRequest: RollbackStackRequest, asyncHandler: AsyncHandler[RollbackStackRequest, RollbackStackResult]): Future[RollbackStackResult] = ??? - - override def setTypeConfigurationAsync(setTypeConfigurationRequest: SetTypeConfigurationRequest): Future[SetTypeConfigurationResult] = ??? - - override def setTypeConfigurationAsync(setTypeConfigurationRequest: SetTypeConfigurationRequest, asyncHandler: AsyncHandler[SetTypeConfigurationRequest, SetTypeConfigurationResult]): Future[SetTypeConfigurationResult] = ??? - - override def testTypeAsync(testTypeRequest: TestTypeRequest): Future[TestTypeResult] = ??? - - override def testTypeAsync(testTypeRequest: TestTypeRequest, asyncHandler: AsyncHandler[TestTypeRequest, TestTypeResult]): Future[TestTypeResult] = ??? - - override def activateType(activateTypeRequest: ActivateTypeRequest): ActivateTypeResult = ??? - - override def batchDescribeTypeConfigurations(batchDescribeTypeConfigurationsRequest: BatchDescribeTypeConfigurationsRequest): BatchDescribeTypeConfigurationsResult = ??? - - override def deactivateType(deactivateTypeRequest: DeactivateTypeRequest): DeactivateTypeResult = ??? - - override def describePublisher(describePublisherRequest: DescribePublisherRequest): DescribePublisherResult = ??? - - override def importStacksToStackSet(importStacksToStackSetRequest: ImportStacksToStackSetRequest): ImportStacksToStackSetResult = ??? - - override def publishType(publishTypeRequest: PublishTypeRequest): PublishTypeResult = ??? - - override def registerPublisher(registerPublisherRequest: RegisterPublisherRequest): RegisterPublisherResult = ??? - - override def rollbackStack(rollbackStackRequest: RollbackStackRequest): RollbackStackResult = ??? - - override def setTypeConfiguration(setTypeConfigurationRequest: SetTypeConfigurationRequest): SetTypeConfigurationResult = ??? - - override def testType(testTypeRequest: TestTypeRequest): TestTypeResult = ??? - - override def cancelUpdateStackAsync(cancelUpdateStackRequest: CancelUpdateStackRequest): Future[CancelUpdateStackResult] = ??? - - override def cancelUpdateStackAsync(cancelUpdateStackRequest: CancelUpdateStackRequest, asyncHandler: AsyncHandler[CancelUpdateStackRequest, CancelUpdateStackResult]): Future[CancelUpdateStackResult] = ??? - - override def continueUpdateRollbackAsync(continueUpdateRollbackRequest: ContinueUpdateRollbackRequest): Future[ContinueUpdateRollbackResult] = ??? - - override def continueUpdateRollbackAsync(continueUpdateRollbackRequest: ContinueUpdateRollbackRequest, asyncHandler: AsyncHandler[ContinueUpdateRollbackRequest, ContinueUpdateRollbackResult]): Future[ContinueUpdateRollbackResult] = ??? - - override def createChangeSetAsync(createChangeSetRequest: CreateChangeSetRequest): Future[CreateChangeSetResult] = ??? - - override def createChangeSetAsync(createChangeSetRequest: CreateChangeSetRequest, asyncHandler: AsyncHandler[CreateChangeSetRequest, CreateChangeSetResult]): Future[CreateChangeSetResult] = ??? 
- - override def createStackAsync(createStackRequest: CreateStackRequest): Future[CreateStackResult] = ??? - - override def createStackAsync(createStackRequest: CreateStackRequest, asyncHandler: AsyncHandler[CreateStackRequest, CreateStackResult]): Future[CreateStackResult] = ??? - - override def createStackInstancesAsync(createStackInstancesRequest: CreateStackInstancesRequest): Future[CreateStackInstancesResult] = ??? - - override def createStackInstancesAsync(createStackInstancesRequest: CreateStackInstancesRequest, asyncHandler: AsyncHandler[CreateStackInstancesRequest, CreateStackInstancesResult]): Future[CreateStackInstancesResult] = ??? - - override def createStackSetAsync(createStackSetRequest: CreateStackSetRequest): Future[CreateStackSetResult] = ??? - - override def createStackSetAsync(createStackSetRequest: CreateStackSetRequest, asyncHandler: AsyncHandler[CreateStackSetRequest, CreateStackSetResult]): Future[CreateStackSetResult] = ??? - - override def deleteChangeSetAsync(deleteChangeSetRequest: DeleteChangeSetRequest): Future[DeleteChangeSetResult] = ??? - - override def deleteChangeSetAsync(deleteChangeSetRequest: DeleteChangeSetRequest, asyncHandler: AsyncHandler[DeleteChangeSetRequest, DeleteChangeSetResult]): Future[DeleteChangeSetResult] = ??? - - override def deleteStackAsync(deleteStackRequest: DeleteStackRequest): Future[DeleteStackResult] = ??? - - override def deleteStackAsync(deleteStackRequest: DeleteStackRequest, asyncHandler: AsyncHandler[DeleteStackRequest, DeleteStackResult]): Future[DeleteStackResult] = ??? - - override def deleteStackInstancesAsync(deleteStackInstancesRequest: DeleteStackInstancesRequest): Future[DeleteStackInstancesResult] = ??? - - override def deleteStackInstancesAsync(deleteStackInstancesRequest: DeleteStackInstancesRequest, asyncHandler: AsyncHandler[DeleteStackInstancesRequest, DeleteStackInstancesResult]): Future[DeleteStackInstancesResult] = ??? - - override def deleteStackSetAsync(deleteStackSetRequest: DeleteStackSetRequest): Future[DeleteStackSetResult] = ??? - - override def deleteStackSetAsync(deleteStackSetRequest: DeleteStackSetRequest, asyncHandler: AsyncHandler[DeleteStackSetRequest, DeleteStackSetResult]): Future[DeleteStackSetResult] = ??? - - override def deregisterTypeAsync(deregisterTypeRequest: DeregisterTypeRequest): Future[DeregisterTypeResult] = ??? - - override def deregisterTypeAsync(deregisterTypeRequest: DeregisterTypeRequest, asyncHandler: AsyncHandler[DeregisterTypeRequest, DeregisterTypeResult]): Future[DeregisterTypeResult] = ??? - - override def describeAccountLimitsAsync(describeAccountLimitsRequest: DescribeAccountLimitsRequest): Future[DescribeAccountLimitsResult] = ??? - - override def describeAccountLimitsAsync(describeAccountLimitsRequest: DescribeAccountLimitsRequest, asyncHandler: AsyncHandler[DescribeAccountLimitsRequest, DescribeAccountLimitsResult]): Future[DescribeAccountLimitsResult] = ??? - - override def describeChangeSetAsync(describeChangeSetRequest: DescribeChangeSetRequest): Future[DescribeChangeSetResult] = ??? - - override def describeChangeSetAsync(describeChangeSetRequest: DescribeChangeSetRequest, asyncHandler: AsyncHandler[DescribeChangeSetRequest, DescribeChangeSetResult]): Future[DescribeChangeSetResult] = ??? - - override def describeStackDriftDetectionStatusAsync(describeStackDriftDetectionStatusRequest: DescribeStackDriftDetectionStatusRequest): Future[DescribeStackDriftDetectionStatusResult] = ??? 
- - override def describeStackDriftDetectionStatusAsync(describeStackDriftDetectionStatusRequest: DescribeStackDriftDetectionStatusRequest, asyncHandler: AsyncHandler[DescribeStackDriftDetectionStatusRequest, DescribeStackDriftDetectionStatusResult]): Future[DescribeStackDriftDetectionStatusResult] = ??? - - override def describeStackEventsAsync(describeStackEventsRequest: DescribeStackEventsRequest): Future[DescribeStackEventsResult] = ??? - - override def describeStackEventsAsync(describeStackEventsRequest: DescribeStackEventsRequest, asyncHandler: AsyncHandler[DescribeStackEventsRequest, DescribeStackEventsResult]): Future[DescribeStackEventsResult] = ??? - - override def describeStackInstanceAsync(describeStackInstanceRequest: DescribeStackInstanceRequest): Future[DescribeStackInstanceResult] = ??? - - override def describeStackInstanceAsync(describeStackInstanceRequest: DescribeStackInstanceRequest, asyncHandler: AsyncHandler[DescribeStackInstanceRequest, DescribeStackInstanceResult]): Future[DescribeStackInstanceResult] = ??? - - override def describeStackResourceAsync(describeStackResourceRequest: DescribeStackResourceRequest): Future[DescribeStackResourceResult] = ??? - - override def describeStackResourceAsync(describeStackResourceRequest: DescribeStackResourceRequest, asyncHandler: AsyncHandler[DescribeStackResourceRequest, DescribeStackResourceResult]): Future[DescribeStackResourceResult] = ??? - - override def describeStackResourceDriftsAsync(describeStackResourceDriftsRequest: DescribeStackResourceDriftsRequest): Future[DescribeStackResourceDriftsResult] = ??? - - override def describeStackResourceDriftsAsync(describeStackResourceDriftsRequest: DescribeStackResourceDriftsRequest, asyncHandler: AsyncHandler[DescribeStackResourceDriftsRequest, DescribeStackResourceDriftsResult]): Future[DescribeStackResourceDriftsResult] = ??? - - override def describeStackResourcesAsync(describeStackResourcesRequest: DescribeStackResourcesRequest): Future[DescribeStackResourcesResult] = ??? - - override def describeStackResourcesAsync(describeStackResourcesRequest: DescribeStackResourcesRequest, asyncHandler: AsyncHandler[DescribeStackResourcesRequest, DescribeStackResourcesResult]): Future[DescribeStackResourcesResult] = ??? - - override def describeStackSetAsync(describeStackSetRequest: DescribeStackSetRequest): Future[DescribeStackSetResult] = ??? - - override def describeStackSetAsync(describeStackSetRequest: DescribeStackSetRequest, asyncHandler: AsyncHandler[DescribeStackSetRequest, DescribeStackSetResult]): Future[DescribeStackSetResult] = ??? - - override def describeStackSetOperationAsync(describeStackSetOperationRequest: DescribeStackSetOperationRequest): Future[DescribeStackSetOperationResult] = ??? - - override def describeStackSetOperationAsync(describeStackSetOperationRequest: DescribeStackSetOperationRequest, asyncHandler: AsyncHandler[DescribeStackSetOperationRequest, DescribeStackSetOperationResult]): Future[DescribeStackSetOperationResult] = ??? - - override def describeStacksAsync(describeStacksRequest: DescribeStacksRequest): Future[DescribeStacksResult] = ??? - - override def describeStacksAsync(describeStacksRequest: DescribeStacksRequest, asyncHandler: AsyncHandler[DescribeStacksRequest, DescribeStacksResult]): Future[DescribeStacksResult] = ??? - - override def describeStacksAsync(): Future[DescribeStacksResult] = ??? - - override def describeStacksAsync(asyncHandler: AsyncHandler[DescribeStacksRequest, DescribeStacksResult]): Future[DescribeStacksResult] = ??? 
- - override def describeTypeAsync(describeTypeRequest: DescribeTypeRequest): Future[DescribeTypeResult] = ??? - - override def describeTypeAsync(describeTypeRequest: DescribeTypeRequest, asyncHandler: AsyncHandler[DescribeTypeRequest, DescribeTypeResult]): Future[DescribeTypeResult] = ??? - - override def describeTypeRegistrationAsync(describeTypeRegistrationRequest: DescribeTypeRegistrationRequest): Future[DescribeTypeRegistrationResult] = ??? - - override def describeTypeRegistrationAsync(describeTypeRegistrationRequest: DescribeTypeRegistrationRequest, asyncHandler: AsyncHandler[DescribeTypeRegistrationRequest, DescribeTypeRegistrationResult]): Future[DescribeTypeRegistrationResult] = ??? - - override def detectStackDriftAsync(detectStackDriftRequest: DetectStackDriftRequest): Future[DetectStackDriftResult] = ??? - - override def detectStackDriftAsync(detectStackDriftRequest: DetectStackDriftRequest, asyncHandler: AsyncHandler[DetectStackDriftRequest, DetectStackDriftResult]): Future[DetectStackDriftResult] = ??? - - override def detectStackResourceDriftAsync(detectStackResourceDriftRequest: DetectStackResourceDriftRequest): Future[DetectStackResourceDriftResult] = ??? - - override def detectStackResourceDriftAsync(detectStackResourceDriftRequest: DetectStackResourceDriftRequest, asyncHandler: AsyncHandler[DetectStackResourceDriftRequest, DetectStackResourceDriftResult]): Future[DetectStackResourceDriftResult] = ??? - - override def detectStackSetDriftAsync(detectStackSetDriftRequest: DetectStackSetDriftRequest): Future[DetectStackSetDriftResult] = ??? - - override def detectStackSetDriftAsync(detectStackSetDriftRequest: DetectStackSetDriftRequest, asyncHandler: AsyncHandler[DetectStackSetDriftRequest, DetectStackSetDriftResult]): Future[DetectStackSetDriftResult] = ??? - - override def estimateTemplateCostAsync(estimateTemplateCostRequest: EstimateTemplateCostRequest): Future[EstimateTemplateCostResult] = ??? - - override def estimateTemplateCostAsync(estimateTemplateCostRequest: EstimateTemplateCostRequest, asyncHandler: AsyncHandler[EstimateTemplateCostRequest, EstimateTemplateCostResult]): Future[EstimateTemplateCostResult] = ??? - - override def estimateTemplateCostAsync(): Future[EstimateTemplateCostResult] = ??? - - override def estimateTemplateCostAsync(asyncHandler: AsyncHandler[EstimateTemplateCostRequest, EstimateTemplateCostResult]): Future[EstimateTemplateCostResult] = ??? - - override def executeChangeSetAsync(executeChangeSetRequest: ExecuteChangeSetRequest): Future[ExecuteChangeSetResult] = ??? - - override def executeChangeSetAsync(executeChangeSetRequest: ExecuteChangeSetRequest, asyncHandler: AsyncHandler[ExecuteChangeSetRequest, ExecuteChangeSetResult]): Future[ExecuteChangeSetResult] = ??? - - override def getStackPolicyAsync(getStackPolicyRequest: GetStackPolicyRequest): Future[GetStackPolicyResult] = ??? - - override def getStackPolicyAsync(getStackPolicyRequest: GetStackPolicyRequest, asyncHandler: AsyncHandler[GetStackPolicyRequest, GetStackPolicyResult]): Future[GetStackPolicyResult] = ??? - - override def getTemplateAsync(getTemplateRequest: GetTemplateRequest): Future[GetTemplateResult] = ??? - - override def getTemplateAsync(getTemplateRequest: GetTemplateRequest, asyncHandler: AsyncHandler[GetTemplateRequest, GetTemplateResult]): Future[GetTemplateResult] = ??? - - override def getTemplateSummaryAsync(getTemplateSummaryRequest: GetTemplateSummaryRequest): Future[GetTemplateSummaryResult] = ??? 
- - override def getTemplateSummaryAsync(getTemplateSummaryRequest: GetTemplateSummaryRequest, asyncHandler: AsyncHandler[GetTemplateSummaryRequest, GetTemplateSummaryResult]): Future[GetTemplateSummaryResult] = ??? - - override def getTemplateSummaryAsync: Future[GetTemplateSummaryResult] = ??? - - override def getTemplateSummaryAsync(asyncHandler: AsyncHandler[GetTemplateSummaryRequest, GetTemplateSummaryResult]): Future[GetTemplateSummaryResult] = ??? - - override def listChangeSetsAsync(listChangeSetsRequest: ListChangeSetsRequest): Future[ListChangeSetsResult] = ??? - - override def listChangeSetsAsync(listChangeSetsRequest: ListChangeSetsRequest, asyncHandler: AsyncHandler[ListChangeSetsRequest, ListChangeSetsResult]): Future[ListChangeSetsResult] = ??? - - override def listExportsAsync(listExportsRequest: ListExportsRequest): Future[ListExportsResult] = ??? - - override def listExportsAsync(listExportsRequest: ListExportsRequest, asyncHandler: AsyncHandler[ListExportsRequest, ListExportsResult]): Future[ListExportsResult] = ??? - - override def listImportsAsync(listImportsRequest: ListImportsRequest): Future[ListImportsResult] = ??? - - override def listImportsAsync(listImportsRequest: ListImportsRequest, asyncHandler: AsyncHandler[ListImportsRequest, ListImportsResult]): Future[ListImportsResult] = ??? - - override def listStackInstancesAsync(listStackInstancesRequest: ListStackInstancesRequest): Future[ListStackInstancesResult] = ??? - - override def listStackInstancesAsync(listStackInstancesRequest: ListStackInstancesRequest, asyncHandler: AsyncHandler[ListStackInstancesRequest, ListStackInstancesResult]): Future[ListStackInstancesResult] = ??? - - override def listStackResourcesAsync(listStackResourcesRequest: ListStackResourcesRequest): Future[ListStackResourcesResult] = ??? - - override def listStackResourcesAsync(listStackResourcesRequest: ListStackResourcesRequest, asyncHandler: AsyncHandler[ListStackResourcesRequest, ListStackResourcesResult]): Future[ListStackResourcesResult] = ??? - - override def listStackSetOperationResultsAsync(listStackSetOperationResultsRequest: ListStackSetOperationResultsRequest): Future[ListStackSetOperationResultsResult] = ??? - - override def listStackSetOperationResultsAsync(listStackSetOperationResultsRequest: ListStackSetOperationResultsRequest, asyncHandler: AsyncHandler[ListStackSetOperationResultsRequest, ListStackSetOperationResultsResult]): Future[ListStackSetOperationResultsResult] = ??? - - override def listStackSetOperationsAsync(listStackSetOperationsRequest: ListStackSetOperationsRequest): Future[ListStackSetOperationsResult] = ??? - - override def listStackSetOperationsAsync(listStackSetOperationsRequest: ListStackSetOperationsRequest, asyncHandler: AsyncHandler[ListStackSetOperationsRequest, ListStackSetOperationsResult]): Future[ListStackSetOperationsResult] = ??? - - override def listStackSetsAsync(listStackSetsRequest: ListStackSetsRequest): Future[ListStackSetsResult] = ??? - - override def listStackSetsAsync(listStackSetsRequest: ListStackSetsRequest, asyncHandler: AsyncHandler[ListStackSetsRequest, ListStackSetsResult]): Future[ListStackSetsResult] = ??? - - override def listStacksAsync(listStacksRequest: ListStacksRequest): Future[ListStacksResult] = ??? - - override def listStacksAsync(listStacksRequest: ListStacksRequest, asyncHandler: AsyncHandler[ListStacksRequest, ListStacksResult]): Future[ListStacksResult] = ??? - - override def listStacksAsync(): Future[ListStacksResult] = ??? 
- - override def listStacksAsync(asyncHandler: AsyncHandler[ListStacksRequest, ListStacksResult]): Future[ListStacksResult] = ??? - - override def listTypeRegistrationsAsync(listTypeRegistrationsRequest: ListTypeRegistrationsRequest): Future[ListTypeRegistrationsResult] = ??? - - override def listTypeRegistrationsAsync(listTypeRegistrationsRequest: ListTypeRegistrationsRequest, asyncHandler: AsyncHandler[ListTypeRegistrationsRequest, ListTypeRegistrationsResult]): Future[ListTypeRegistrationsResult] = ??? - - override def listTypeVersionsAsync(listTypeVersionsRequest: ListTypeVersionsRequest): Future[ListTypeVersionsResult] = ??? - - override def listTypeVersionsAsync(listTypeVersionsRequest: ListTypeVersionsRequest, asyncHandler: AsyncHandler[ListTypeVersionsRequest, ListTypeVersionsResult]): Future[ListTypeVersionsResult] = ??? - - override def listTypesAsync(listTypesRequest: ListTypesRequest): Future[ListTypesResult] = ??? - - override def listTypesAsync(listTypesRequest: ListTypesRequest, asyncHandler: AsyncHandler[ListTypesRequest, ListTypesResult]): Future[ListTypesResult] = ??? - - override def recordHandlerProgressAsync(recordHandlerProgressRequest: RecordHandlerProgressRequest): Future[RecordHandlerProgressResult] = ??? - - override def recordHandlerProgressAsync(recordHandlerProgressRequest: RecordHandlerProgressRequest, asyncHandler: AsyncHandler[RecordHandlerProgressRequest, RecordHandlerProgressResult]): Future[RecordHandlerProgressResult] = ??? - - override def registerTypeAsync(registerTypeRequest: RegisterTypeRequest): Future[RegisterTypeResult] = ??? - - override def registerTypeAsync(registerTypeRequest: RegisterTypeRequest, asyncHandler: AsyncHandler[RegisterTypeRequest, RegisterTypeResult]): Future[RegisterTypeResult] = ??? - - override def setStackPolicyAsync(setStackPolicyRequest: SetStackPolicyRequest): Future[SetStackPolicyResult] = ??? - - override def setStackPolicyAsync(setStackPolicyRequest: SetStackPolicyRequest, asyncHandler: AsyncHandler[SetStackPolicyRequest, SetStackPolicyResult]): Future[SetStackPolicyResult] = ??? - - override def setTypeDefaultVersionAsync(setTypeDefaultVersionRequest: SetTypeDefaultVersionRequest): Future[SetTypeDefaultVersionResult] = ??? - - override def setTypeDefaultVersionAsync(setTypeDefaultVersionRequest: SetTypeDefaultVersionRequest, asyncHandler: AsyncHandler[SetTypeDefaultVersionRequest, SetTypeDefaultVersionResult]): Future[SetTypeDefaultVersionResult] = ??? - - override def signalResourceAsync(signalResourceRequest: SignalResourceRequest): Future[SignalResourceResult] = ??? - - override def signalResourceAsync(signalResourceRequest: SignalResourceRequest, asyncHandler: AsyncHandler[SignalResourceRequest, SignalResourceResult]): Future[SignalResourceResult] = ??? - - override def stopStackSetOperationAsync(stopStackSetOperationRequest: StopStackSetOperationRequest): Future[StopStackSetOperationResult] = ??? - - override def stopStackSetOperationAsync(stopStackSetOperationRequest: StopStackSetOperationRequest, asyncHandler: AsyncHandler[StopStackSetOperationRequest, StopStackSetOperationResult]): Future[StopStackSetOperationResult] = ??? - - override def updateStackAsync(updateStackRequest: UpdateStackRequest): Future[UpdateStackResult] = ??? - - override def updateStackAsync(updateStackRequest: UpdateStackRequest, asyncHandler: AsyncHandler[UpdateStackRequest, UpdateStackResult]): Future[UpdateStackResult] = ??? 
- - override def updateStackInstancesAsync(updateStackInstancesRequest: UpdateStackInstancesRequest): Future[UpdateStackInstancesResult] = ??? - - override def updateStackInstancesAsync(updateStackInstancesRequest: UpdateStackInstancesRequest, asyncHandler: AsyncHandler[UpdateStackInstancesRequest, UpdateStackInstancesResult]): Future[UpdateStackInstancesResult] = ??? - - override def updateStackSetAsync(updateStackSetRequest: UpdateStackSetRequest): Future[UpdateStackSetResult] = ??? - - override def updateStackSetAsync(updateStackSetRequest: UpdateStackSetRequest, asyncHandler: AsyncHandler[UpdateStackSetRequest, UpdateStackSetResult]): Future[UpdateStackSetResult] = ??? - - override def updateTerminationProtectionAsync(updateTerminationProtectionRequest: UpdateTerminationProtectionRequest): Future[UpdateTerminationProtectionResult] = ??? - - override def updateTerminationProtectionAsync(updateTerminationProtectionRequest: UpdateTerminationProtectionRequest, asyncHandler: AsyncHandler[UpdateTerminationProtectionRequest, UpdateTerminationProtectionResult]): Future[UpdateTerminationProtectionResult] = ??? - - override def validateTemplateAsync(validateTemplateRequest: ValidateTemplateRequest): Future[ValidateTemplateResult] = ??? - - override def validateTemplateAsync(validateTemplateRequest: ValidateTemplateRequest, asyncHandler: AsyncHandler[ValidateTemplateRequest, ValidateTemplateResult]): Future[ValidateTemplateResult] = ??? - - override def setEndpoint(endpoint: String): Unit = ??? - - override def setRegion(region: Region): Unit = ??? - - override def cancelUpdateStack(cancelUpdateStackRequest: CancelUpdateStackRequest): CancelUpdateStackResult = ??? - - override def continueUpdateRollback(continueUpdateRollbackRequest: ContinueUpdateRollbackRequest): ContinueUpdateRollbackResult = ??? - - override def createChangeSet(createChangeSetRequest: CreateChangeSetRequest): CreateChangeSetResult = ??? - - override def createStack(createStackRequest: CreateStackRequest): CreateStackResult = ??? - - override def createStackInstances(createStackInstancesRequest: CreateStackInstancesRequest): CreateStackInstancesResult = ??? - - override def createStackSet(createStackSetRequest: CreateStackSetRequest): CreateStackSetResult = ??? - - override def deleteChangeSet(deleteChangeSetRequest: DeleteChangeSetRequest): DeleteChangeSetResult = ??? - - override def deleteStack(deleteStackRequest: DeleteStackRequest): DeleteStackResult = ??? - - override def deleteStackInstances(deleteStackInstancesRequest: DeleteStackInstancesRequest): DeleteStackInstancesResult = ??? - - override def deleteStackSet(deleteStackSetRequest: DeleteStackSetRequest): DeleteStackSetResult = ??? - - override def deregisterType(deregisterTypeRequest: DeregisterTypeRequest): DeregisterTypeResult = ??? - - override def describeAccountLimits(describeAccountLimitsRequest: DescribeAccountLimitsRequest): DescribeAccountLimitsResult = ??? - - override def describeChangeSet(describeChangeSetRequest: DescribeChangeSetRequest): DescribeChangeSetResult = ??? - - override def describeStackDriftDetectionStatus(describeStackDriftDetectionStatusRequest: DescribeStackDriftDetectionStatusRequest): DescribeStackDriftDetectionStatusResult = ??? - - override def describeStackEvents(describeStackEventsRequest: DescribeStackEventsRequest): DescribeStackEventsResult = ??? - - override def describeStackInstance(describeStackInstanceRequest: DescribeStackInstanceRequest): DescribeStackInstanceResult = ??? 
- - override def describeStackResource(describeStackResourceRequest: DescribeStackResourceRequest): DescribeStackResourceResult = ??? - - override def describeStackResourceDrifts(describeStackResourceDriftsRequest: DescribeStackResourceDriftsRequest): DescribeStackResourceDriftsResult = ??? - - override def describeStackResources(describeStackResourcesRequest: DescribeStackResourcesRequest): DescribeStackResourcesResult = ??? - - override def describeStackSet(describeStackSetRequest: DescribeStackSetRequest): DescribeStackSetResult = ??? - - override def describeStackSetOperation(describeStackSetOperationRequest: DescribeStackSetOperationRequest): DescribeStackSetOperationResult = ??? - - override def describeStacks(describeStacksRequest: DescribeStacksRequest): DescribeStacksResult = ??? - - override def describeStacks(): DescribeStacksResult = ??? - - override def describeType(describeTypeRequest: DescribeTypeRequest): DescribeTypeResult = ??? - - override def describeTypeRegistration(describeTypeRegistrationRequest: DescribeTypeRegistrationRequest): DescribeTypeRegistrationResult = ??? - - override def detectStackDrift(detectStackDriftRequest: DetectStackDriftRequest): DetectStackDriftResult = ??? - - override def detectStackResourceDrift(detectStackResourceDriftRequest: DetectStackResourceDriftRequest): DetectStackResourceDriftResult = ??? - - override def detectStackSetDrift(detectStackSetDriftRequest: DetectStackSetDriftRequest): DetectStackSetDriftResult = ??? - - override def estimateTemplateCost(estimateTemplateCostRequest: EstimateTemplateCostRequest): EstimateTemplateCostResult = ??? - - override def estimateTemplateCost(): EstimateTemplateCostResult = ??? - - override def executeChangeSet(executeChangeSetRequest: ExecuteChangeSetRequest): ExecuteChangeSetResult = ??? - - override def getStackPolicy(getStackPolicyRequest: GetStackPolicyRequest): GetStackPolicyResult = ??? - - override def getTemplate(getTemplateRequest: GetTemplateRequest): GetTemplateResult = ??? - - override def getTemplateSummary(getTemplateSummaryRequest: GetTemplateSummaryRequest): GetTemplateSummaryResult = ??? - - override def getTemplateSummary: GetTemplateSummaryResult = ??? - - override def listChangeSets(listChangeSetsRequest: ListChangeSetsRequest): ListChangeSetsResult = ??? - - override def listExports(listExportsRequest: ListExportsRequest): ListExportsResult = ??? - - override def listImports(listImportsRequest: ListImportsRequest): ListImportsResult = ??? - - override def listStackInstances(listStackInstancesRequest: ListStackInstancesRequest): ListStackInstancesResult = ??? - - override def listStackResources(listStackResourcesRequest: ListStackResourcesRequest): ListStackResourcesResult = ??? - - override def listStackSetOperationResults(listStackSetOperationResultsRequest: ListStackSetOperationResultsRequest): ListStackSetOperationResultsResult = ??? - - override def listStackSetOperations(listStackSetOperationsRequest: ListStackSetOperationsRequest): ListStackSetOperationsResult = ??? - - override def listStackSets(listStackSetsRequest: ListStackSetsRequest): ListStackSetsResult = ??? - - override def listStacks(listStacksRequest: ListStacksRequest): ListStacksResult = ??? - - override def listStacks(): ListStacksResult = ??? - - override def listTypeRegistrations(listTypeRegistrationsRequest: ListTypeRegistrationsRequest): ListTypeRegistrationsResult = ??? - - override def listTypeVersions(listTypeVersionsRequest: ListTypeVersionsRequest): ListTypeVersionsResult = ??? 
- - override def listTypes(listTypesRequest: ListTypesRequest): ListTypesResult = ??? - - override def recordHandlerProgress(recordHandlerProgressRequest: RecordHandlerProgressRequest): RecordHandlerProgressResult = ??? - - override def registerType(registerTypeRequest: RegisterTypeRequest): RegisterTypeResult = ??? - - override def setStackPolicy(setStackPolicyRequest: SetStackPolicyRequest): SetStackPolicyResult = ??? - - override def setTypeDefaultVersion(setTypeDefaultVersionRequest: SetTypeDefaultVersionRequest): SetTypeDefaultVersionResult = ??? - - override def signalResource(signalResourceRequest: SignalResourceRequest): SignalResourceResult = ??? - - override def stopStackSetOperation(stopStackSetOperationRequest: StopStackSetOperationRequest): StopStackSetOperationResult = ??? - - override def updateStack(updateStackRequest: UpdateStackRequest): UpdateStackResult = ??? - - override def updateStackInstances(updateStackInstancesRequest: UpdateStackInstancesRequest): UpdateStackInstancesResult = ??? - - override def updateStackSet(updateStackSetRequest: UpdateStackSetRequest): UpdateStackSetResult = ??? - - override def updateTerminationProtection(updateTerminationProtectionRequest: UpdateTerminationProtectionRequest): UpdateTerminationProtectionResult = ??? - - override def validateTemplate(validateTemplateRequest: ValidateTemplateRequest): ValidateTemplateResult = ??? - - override def shutdown(): Unit = ??? - - override def getCachedResponseMetadata(request: AmazonWebServiceRequest): ResponseMetadata = ??? - - override def waiters(): AmazonCloudFormationWaiters = ??? -} diff --git a/main/src/test/scala/com/dwolla/fs2aws/KmsDecrypterMockabilityTest.scala b/main/src/test/scala/com/dwolla/fs2aws/KmsDecrypterMockabilityTest.scala deleted file mode 100644 index 5375934f..00000000 --- a/main/src/test/scala/com/dwolla/fs2aws/KmsDecrypterMockabilityTest.scala +++ /dev/null @@ -1,33 +0,0 @@ -package com.dwolla.fs2aws - -import cats.effect._ -import com.dwolla.fs2aws.kms._ -import org.mockito.ArgumentMatchers -import org.specs2.concurrent.ExecutionEnv -import org.specs2.mock.Mockito -import org.specs2.mutable.Specification -import org.specs2.specification.Scope - -class KmsDecrypterMockabilityTest(implicit ee: ExecutionEnv) extends Specification with Mockito { - - trait Setup extends Scope { - val mockKmsDecrypter = mock[KmsDecrypter[IO]] - - mockKmsDecrypter.decrypt( - ArgumentMatchers.any[Transform[String]], - ArgumentMatchers.any[String] - ) returns IO("hello world").map(_.getBytes("UTF-8")) - } - - "KmsDecrypter" should { - "be mockable when constructed directly" in new Setup { - val output = mockKmsDecrypter.decrypt[String](base64DecodingTransform, "crypto-test") - .map(new String(_, "UTF-8")) - .unsafeToFuture() - - output should be_==("hello world").await - - } - } - -} diff --git a/main/src/test/scala/com/dwolla/fs2aws/cloudformation/AmazonAsyncMockingImplicits.scala b/main/src/test/scala/com/dwolla/fs2aws/cloudformation/AmazonAsyncMockingImplicits.scala deleted file mode 100644 index 4b0ef6d4..00000000 --- a/main/src/test/scala/com/dwolla/fs2aws/cloudformation/AmazonAsyncMockingImplicits.scala +++ /dev/null @@ -1,101 +0,0 @@ -package com.dwolla.fs2aws.cloudformation - -import java.util.concurrent.{Future => JFuture} - -import com.amazonaws.AmazonWebServiceRequest -import com.amazonaws.handlers.AsyncHandler -import org.mockito.ArgumentMatchers._ -import org.mockito.Mockito._ -import org.mockito.invocation.InvocationOnMock - -import scala.reflect.ClassTag - -object 
AmazonAsyncMockingImplicits { - - /** - * For example: - * - * {{{ - * import com.dwolla.awssdk.AmazonMockAsyncImplicits._ - * import com.amazonaws.services.cloudformation.model.{UpdateStackRequest, UpdateStackResult} - * - * mockedMethod(mock[AmazonCloudFormationAsync].updateStackAsync) answers( - * new UpdateStackRequest() → new UpdateStackResult(), - * new UpdateStackRequest().withStackName("bad-name") → new RuntimeException("bad stack name!") - * ) - * }}} - * - * @param func the Amazon async client method to be mocked - * @tparam Req an Amazon web service request object - * @tparam Res an Amazon web service result object - * @return MockAnswerMappingBuilder - */ - def mockedMethod[Req <: AmazonWebServiceRequest : ClassTag, Res](func: (Req, AsyncHandler[Req, Res]) => JFuture[Res]): MockAnswerMappingBuilder[Req, Res] = new MockAnswerMappingBuilder(func) - - /** - * For example: - * - * {{{ - * import com.dwolla.awssdk.AmazonMockAsyncImplicits._ - * new UpdateStackResult() completes mock[AmazonCloudFormationAsync].updateStackAsync - * }}} - * - */ - implicit class AmazonAsyncResult[Res](res: Res) { - - def completes[Req <: AmazonWebServiceRequest](func: (Req, AsyncHandler[Req, Res]) => JFuture[Res]): Unit = { - - when(func(any[Req], any[AsyncHandler[Req, Res]])) thenAnswer ((invocation: InvocationOnMock) => { - invocation.getArguments match { - case Array(req, handler) => handler.asInstanceOf[AsyncHandler[Req, Res]].onSuccess(req.asInstanceOf[Req], res) - case _ => throw new IllegalArgumentException("must be invoked with the request and handler as arguments") - } - null - }) - - () - } - } - - /** - * For example: - * - * {{{ - * import com.dwolla.awssdk.AmazonMockAsyncImplicits._ - * import com.amazonaws.services.ecs.AmazonECSAsync - * import com.amazonaws.services.ecs.model.{Cluster, DescribeClustersResult, ListContainerInstancesRequest, ListContainerInstancesResult} - * - * Map( - * new ListContainerInstancesRequest().withCluster("cluster1") → new ListContainerInstancesResult().withContainerInstanceArns("arn1").withNextToken("next-token"), - * new ListContainerInstancesRequest().withCluster("cluster1").withNextToken("next-token") → new ListContainerInstancesResult().withContainerInstanceArns("arn2") - * ) completes mock[AmazonECSAsync].listContainerInstancesAsync - * }}} - */ - implicit class AmazonAsyncResults[Req <: AmazonWebServiceRequest : ClassTag, Res](responseMapping: Map[Req, Either[Exception, Res]]) { - - def completes(func: (Req, AsyncHandler[Req, Res]) => JFuture[Res]): Unit = { - when(func(any[Req], any[AsyncHandler[Req, Res]])) thenAnswer ((invocation: InvocationOnMock) => { - invocation.getArguments match { - case Array(req: Req, _) if !responseMapping.contains(req) => throw TestSetupException(s"req/res mapping does not contain `$req`. 
Look at the mock setup: is an expected case missing?") - case Array(req: Req, handler: AsyncHandler[Req, Res]) => responseMapping(req) match { - case Left(ex) => handler.onError(ex) - case Right(res) => handler.onSuccess(req, res) - } - case _ => throw new IllegalArgumentException("must be invoked with the request and handler as arguments") - } - null - }) - - () - } - - case class TestSetupException(msg: String) extends RuntimeException(msg) - } - - class MockAnswerMappingBuilder[Req <: AmazonWebServiceRequest : ClassTag, Res](func: (Req, AsyncHandler[Req, Res]) => JFuture[Res]) { - def answers(requestResponseMappings: (Req, Either[Exception, Res])*): Unit = Map(requestResponseMappings: _*) completes func - } - - implicit def liftExceptionToLeft[Res](ex: Exception): Either[Exception, Res] = Left(ex) - implicit def liftResponseToRight[Res](res: Res): Either[Exception, Res] = Right(res) -} diff --git a/main/src/test/scala/com/dwolla/fs2aws/cloudformation/CloudFormationClientSpec.scala b/main/src/test/scala/com/dwolla/fs2aws/cloudformation/CloudFormationClientSpec.scala deleted file mode 100644 index 98008aec..00000000 --- a/main/src/test/scala/com/dwolla/fs2aws/cloudformation/CloudFormationClientSpec.scala +++ /dev/null @@ -1,114 +0,0 @@ -package com.dwolla.fs2aws.cloudformation - -import cats.effect._ -import com.amazonaws.handlers.AsyncHandler -import com.amazonaws.services.cloudformation.AmazonCloudFormationAsync -import com.amazonaws.services.cloudformation.model.StackStatus.UPDATE_COMPLETE -import com.amazonaws.services.cloudformation.model._ -import com.dwolla.fs2aws.cloudformation.AmazonAsyncMockingImplicits._ -import org.specs2.concurrent.ExecutionEnv -import org.specs2.mock.Mockito -import org.specs2.mutable.Specification -import org.specs2.specification.Scope - -//noinspection RedundantDefaultArgument -class CloudFormationClientSpec(implicit ee: ExecutionEnv) extends Specification with Mockito { - - trait Setup extends Scope { - val mockAsyncClient = mock[AmazonCloudFormationAsync] - val client = new CloudFormationClientImpl[IO](mockAsyncClient) - } - - trait StackExistsSetup extends Setup { - new UpdateStackResult().withStackId("updated-stack-id") completes mockAsyncClient.updateStackAsync - new DescribeStacksResult().withStacks(new Stack().withStackId("stack-id").withStackName("stack-name").withStackStatus(UPDATE_COMPLETE)) completes mockAsyncClient.describeStacksAsync - new CreateChangeSetResult().withStackId("stack-id-with-change-set") completes mockAsyncClient.createChangeSetAsync - } - - trait StackExistsOnSecondPageSetup extends Setup { - new UpdateStackResult().withStackId("updated-stack-id") completes mockAsyncClient.updateStackAsync - - Map( - new DescribeStacksRequest() -> Right(new DescribeStacksResult() - .withNextToken("next-token") - .withStacks(new Stack().withStackId("diff-stack-id").withStackName("diff-stack-name").withStackStatus(UPDATE_COMPLETE)) - ), - new DescribeStacksRequest().withNextToken("next-token") -> Right(new DescribeStacksResult() - .withStacks(new Stack().withStackId("stack-id").withStackName("stack-name").withStackStatus(UPDATE_COMPLETE)) - ) - ) completes mockAsyncClient.describeStacksAsync - new CreateChangeSetResult().withStackId("stack-id-with-change-set") completes mockAsyncClient.createChangeSetAsync - } - - trait StackMissingSetup extends Setup { - new DescribeStacksResult() completes mockAsyncClient.describeStacksAsync - new CreateStackResult().withStackId("created-stack-id") completes mockAsyncClient.createStackAsync - new 
CreateChangeSetResult().withStackId("created-stack-id-with-change-set") completes mockAsyncClient.createChangeSetAsync - } - - "createOrUpdateTemplate" should { - "call update stack when the stack exists and no changeset name is specified" in new StackExistsSetup { - val output = client.createOrUpdateTemplate("stack-name", "{}", changeSetName = None) - - output.unsafeToFuture() must be_==("updated-stack-id").await - - there was one(mockAsyncClient).updateStackAsync(any[UpdateStackRequest], any[AsyncHandler[UpdateStackRequest, UpdateStackResult]]) - there was no(mockAsyncClient).createChangeSetAsync(any[CreateChangeSetRequest], any[AsyncHandler[CreateChangeSetRequest, CreateChangeSetResult]]) - there was no(mockAsyncClient).createStackAsync(any[CreateStackRequest], any[AsyncHandler[CreateStackRequest, CreateStackResult]]) - } - - "call update stack when the stack exists (but not on the first page of results) and no changeset name is specified" in new StackExistsOnSecondPageSetup { - val output = client.createOrUpdateTemplate("stack-name", "{}", changeSetName = None) - - output.unsafeToFuture() must be_==("updated-stack-id").await - - there was one(mockAsyncClient).updateStackAsync(any[UpdateStackRequest], any[AsyncHandler[UpdateStackRequest, UpdateStackResult]]) - there was no(mockAsyncClient).createChangeSetAsync(any[CreateChangeSetRequest], any[AsyncHandler[CreateChangeSetRequest, CreateChangeSetResult]]) - there was no(mockAsyncClient).createStackAsync(any[CreateStackRequest], any[AsyncHandler[CreateStackRequest, CreateStackResult]]) - } - - "create change set when the stack exists and a changeset name is specified" in new StackExistsSetup { - val output = client.createOrUpdateTemplate("stack-name", "{}", changeSetName = Option("change-set-name")) - - output.unsafeToFuture() must be_==("stack-id-with-change-set").await - - val requestCaptor = capture[CreateChangeSetRequest] - - there was one(mockAsyncClient).createChangeSetAsync(requestCaptor.capture, any[AsyncHandler[CreateChangeSetRequest, CreateChangeSetResult]]) - there was no(mockAsyncClient).updateStackAsync(any[UpdateStackRequest], any[AsyncHandler[UpdateStackRequest, UpdateStackResult]]) - there was no(mockAsyncClient).createStackAsync(any[CreateStackRequest], any[AsyncHandler[CreateStackRequest, CreateStackResult]]) - - requestCaptor.value must beLike { case req => - req.getChangeSetName must_== "change-set-name" - req.getChangeSetType must_== "UPDATE" - } - } - - "create stack when it does not exist and no changeset name is specified" in new StackMissingSetup { - val output = client.createOrUpdateTemplate("stack-name", "{}") - - output.unsafeToFuture() must be_==("created-stack-id").await - - there was one(mockAsyncClient).createStackAsync(any[CreateStackRequest], any[AsyncHandler[CreateStackRequest, CreateStackResult]]) - there was no(mockAsyncClient).createChangeSetAsync(any[CreateChangeSetRequest], any[AsyncHandler[CreateChangeSetRequest, CreateChangeSetResult]]) - there was no(mockAsyncClient).updateStackAsync(any[UpdateStackRequest], any[AsyncHandler[UpdateStackRequest, UpdateStackResult]]) - } - - "create change set when stack does not exist and a changeset name is specified" in new StackMissingSetup { - val output = client.createOrUpdateTemplate("stack-name", "{}", changeSetName = Option("change-set-name")) - - output.unsafeToFuture() must be_==("created-stack-id-with-change-set").await - - val requestCaptor = capture[CreateChangeSetRequest] - - there was one(mockAsyncClient).createChangeSetAsync(requestCaptor.capture, 
any[AsyncHandler[CreateChangeSetRequest, CreateChangeSetResult]]) - there was no(mockAsyncClient).createStackAsync(any[CreateStackRequest], any[AsyncHandler[CreateStackRequest, CreateStackResult]]) - there was no(mockAsyncClient).updateStackAsync(any[UpdateStackRequest], any[AsyncHandler[UpdateStackRequest, UpdateStackResult]]) - - requestCaptor.value must beLike { case req => - req.getChangeSetName must_== "change-set-name" - req.getChangeSetType must_== "CREATE" - } - } - } -} diff --git a/main/src/test/scala/com/dwolla/fs2aws/examples/ExecuteViaTest.scala b/main/src/test/scala/com/dwolla/fs2aws/examples/ExecuteViaTest.scala deleted file mode 100644 index 45b98009..00000000 --- a/main/src/test/scala/com/dwolla/fs2aws/examples/ExecuteViaTest.scala +++ /dev/null @@ -1,16 +0,0 @@ -package com.dwolla.fs2aws -package examples - -import cats.effect._ -import com.amazonaws.services.cloudformation._ -import com.amazonaws.services.cloudformation.model._ - -class ExecuteViaTest { - - val req = new DescribeStackEventsRequest() - - val client: AmazonCloudFormationAsync = new FakeAmazonCloudFormationAsyncClient - - val x: IO[DescribeStackEventsResult] = req.executeVia[IO](client.describeStackEventsAsync) - -} diff --git a/main/src/test/scala/com/dwolla/fs2aws/examples/FetchAllTest.scala b/main/src/test/scala/com/dwolla/fs2aws/examples/FetchAllTest.scala deleted file mode 100644 index 8e778f0f..00000000 --- a/main/src/test/scala/com/dwolla/fs2aws/examples/FetchAllTest.scala +++ /dev/null @@ -1,19 +0,0 @@ -package com.dwolla.fs2aws -package examples - -import cats.effect._ -import com.amazonaws.services.cloudformation.AmazonCloudFormationAsync -import com.amazonaws.services.cloudformation.model._ -import fs2._ - -import scala.jdk.CollectionConverters._ - -class FetchAllTest { - - val requestFactory = () => new DescribeStackEventsRequest() - - val client: AmazonCloudFormationAsync = new FakeAmazonCloudFormationAsyncClient - - val x: Stream[IO, StackEvent] = requestFactory.fetchAll[IO](client.describeStackEventsAsync)(_.getStackEvents.asScala.toSeq) - -} diff --git a/main/src/test/scala/com/dwolla/fs2aws/s3/S3ClientSpec.scala b/main/src/test/scala/com/dwolla/fs2aws/s3/S3ClientSpec.scala deleted file mode 100644 index 351ca3a7..00000000 --- a/main/src/test/scala/com/dwolla/fs2aws/s3/S3ClientSpec.scala +++ /dev/null @@ -1,186 +0,0 @@ -package com.dwolla.fs2aws.s3 - -import java.io.ByteArrayInputStream - -import cats.effect._ -import cats.effect.concurrent.Deferred -import cats.implicits._ -import com.amazonaws.AmazonClientException -import com.amazonaws.event.{ProgressEvent, ProgressListener, _} -import com.amazonaws.services.s3.AmazonS3 -import com.amazonaws.services.s3.model._ -import com.amazonaws.services.s3.transfer._ -import com.amazonaws.services.s3.transfer.internal.S3ProgressListener -import com.amazonaws.services.s3.transfer.model.UploadResult -import com.dwolla.fs2aws.s3.S3Client.S3ClientImpl -import fs2._ -import org.apache.http.client.methods.HttpRequestBase -import org.mockito.Mockito._ -import org.mockito.invocation.InvocationOnMock -import org.mockito.stubbing.Answer -import org.specs2.concurrent.ExecutionEnv -import org.specs2.matcher.{IOMatchers, Matchers} -import org.specs2.mock.Mockito -import org.specs2.mutable.Specification -import org.specs2.specification.Scope - -class S3ClientSpec(implicit ee: ExecutionEnv) extends Specification with Matchers with Mockito with IOMatchers { - - implicit val cs = IO.contextShift(ee.executionContext) - - val blocker = 
Blocker.liftExecutionContext(ee.executionContext)
-
-  trait setup extends Scope {
-
-    val tm = mock[TransferManager]
-    val mockS3 = mock[AmazonS3]
-    when(tm.getAmazonS3Client).thenReturn(mockS3)
-    val client = new S3ClientImpl[IO](tm, blocker)
-    val bucket = tagBucket("bucket")
-    val key = tagKey("key")
-    val expectedContentType = "text/x-shellscript"
-
-    val mockTransferManager = IO(mock[TransferManager])
-    val objectMetadata = IO {
-      val om = new ObjectMetadata()
-      om.setContentType(expectedContentType)
-      om
-    }
-  }
-
-  "S3Client" should {
-    "upload the bytes of the stream to the given S3 location" in new setup {
-      val expectedBytes = (0 until 100).map(_.toByte)
-
-      for {
-        deferredUploadArguments <- Deferred[IO, (PutObjectRequest, S3ProgressListener)]
-        tm <- mockTransferManager.flatTap(tm => IO.asyncF[Unit] { mockSetupComplete =>
-          for {
-            arguments <- IO.async[(PutObjectRequest, S3ProgressListener)] { captureArguments =>
-              tm.upload(any[PutObjectRequest], any[S3ProgressListener]) answers ((arr: Array[AnyRef]) =>
-                arr.toList match {
-                  case (por: PutObjectRequest) :: (s3ProgressListener: S3ProgressListener) :: Nil =>
-                    captureArguments(Right((por, s3ProgressListener)))
-                    new NoOpUpload
-                  case _ =>
-                    captureArguments(Left(new RuntimeException("the wrong type of arguments were passed to the function")))
-                    null
-                })
-
-              mockSetupComplete(Right(()))
-            }
-            _ <- deferredUploadArguments.complete(arguments)
-          } yield ()
-        })
-        om <- objectMetadata
-        client: S3Client[IO] = new S3ClientImpl[IO](tm, blocker)
-
-        fiber <- Concurrent[IO].start(Stream.emits(expectedBytes).covary[IO].through(client.uploadSink(bucket, key, om)).compile.drain)
-
-        (putObjectRequest, s3ProgressListener) <- deferredUploadArguments.get
-        passedBytes <- io.readInputStream(IO(putObjectRequest.getInputStream), 16, blocker).compile.toList
-
-        _ <- IO(s3ProgressListener.progressChanged(new ProgressEvent(ProgressEventType.TRANSFER_COMPLETED_EVENT)))
-        _ <- fiber.join
-      } yield {
-        putObjectRequest.getBucketName must_== "bucket"
-        putObjectRequest.getKey must_== "key"
-        passedBytes must_== expectedBytes
-        putObjectRequest.getMetadata.getContentType must_== expectedContentType
-      }
-    }
-
-    "download the bytes of the stream from the given S3 location" in new setup {
-      val obj = mock[S3Object]
-      val expected = (0 until 100).toArray.map(_.toByte)
-      when(mockS3.getObject(bucket, key)).thenReturn(obj)
-      when(obj.getObjectContent).thenReturn(new S3ObjectInputStream(new ByteArrayInputStream(expected), mock[HttpRequestBase]))
-
-      client.downloadObject(bucket, key).compile.toList must returnValue(equalTo(expected.toList))
-    }
-
-    "delete object from the given S3 location" in new setup {
-      client.deleteObject(bucket, key).compile.drain must returnOk
-
-      there was one(mockS3).deleteObject(bucket, key)
-    }
-
-    "list the keys in the given bucket when the results are not truncated" in new setup {
-      val listing = new ObjectListing()
-      val summary1 = new S3ObjectSummary()
-      summary1.setKey("key1")
-      listing.getObjectSummaries.add(summary1)
-      when(mockS3.listObjects(any[ListObjectsRequest])).thenAnswer(new Answer[ObjectListing] {
-        override def answer(invocation: InvocationOnMock): ObjectListing = {
-          val argument = invocation.getArgument[ListObjectsRequest](0)
-
-          argument.getMarker match {
-            case null =>
-              val x = new ObjectListing
-              x.getObjectSummaries.add(objectSummary(1))
-              x.setTruncated(false)
-              x.setNextMarker(null)
-              x
-            case _ => null
-          }
-        }
-      })
-
-      private val output = client.listBucket(tagBucket("bucket"))
-
-      output.compile.toList must returnValue(equalTo(List("key1")))
-    }
-
-    "list the keys in the given bucket when the results are paginated" in new setup {
-      when(mockS3.listObjects(any[ListObjectsRequest])).thenAnswer(new Answer[ObjectListing] {
-        override def answer(invocation: InvocationOnMock): ObjectListing = {
-          val argument = invocation.getArgument[ListObjectsRequest](0)
-
-          argument.getMarker match {
-            case null =>
-              val x = new ObjectListing
-              x.getObjectSummaries.add(objectSummary(1))
-              x.setNextMarker("marker")
-              x.setTruncated(true)
-              x
-            case "marker" =>
-              val x = new ObjectListing
-              x.getObjectSummaries.add(objectSummary(2))
-              x.setTruncated(false)
-              x.setNextMarker(null)
-              x
-            case _ => null
-          }
-        }
-      })
-
-      private val output = client.listBucket(tagBucket("bucket"))
-
-      output.compile.toList must returnValue(equalTo(List("key1", "key2")))
-    }
-  }
-
-  private def objectSummary(i: Int): S3ObjectSummary = {
-    val x = new S3ObjectSummary
-    x.setKey(s"key$i")
-    x
-  }
-
-}
-
-class NoOpUpload extends Upload {
-  override def waitForUploadResult(): UploadResult = ???
-  override def pause(): PersistableUpload = ???
-  override def tryPause(forceCancelTransfers: Boolean): PauseResult[PersistableUpload] = ???
-  override def abort(): Unit = ???
-  override def isDone: Boolean = ???
-  override def waitForCompletion(): Unit = ???
-  override def waitForException(): AmazonClientException = ???
-  override def getDescription: String = ???
-  override def getState: Transfer.TransferState = ???
-  override def addProgressListener(listener: ProgressListener): Unit = ???
-  override def removeProgressListener(listener: ProgressListener): Unit = ???
-  override def getProgress: TransferProgress = ???
-  @deprecated("upstream deprecation", "upstream") override def addProgressListener(listener: com.amazonaws.services.s3.model.ProgressListener): Unit = ???
-  @deprecated("upstream deprecation", "upstream") override def removeProgressListener(listener: com.amazonaws.services.s3.model.ProgressListener): Unit = ???
-}
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 3b3a0f3d..49d49f0e 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -3,7 +3,7 @@ addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.1")
 addSbtPlugin("org.portable-scala" % "sbt-crossproject" % "1.1.0")
 addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.1.0")
 addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.20")
-addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.10.1")
+addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.13.0")
 addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.10")
 addSbtPlugin("org.scalablytyped.converter" % "sbt-converter" % "1.0.0-beta36")
diff --git a/test-kit/src/main/scala-2.12/com/dwolla/fs2aws/kms/NoOpDecrypter.scala b/test-kit/src/main/scala-2.12/com/dwolla/fs2aws/kms/NoOpDecrypter.scala
deleted file mode 100644
index d286c898..00000000
--- a/test-kit/src/main/scala-2.12/com/dwolla/fs2aws/kms/NoOpDecrypter.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.dwolla.fs2aws.kms
-
-import _root_.fs2._
-import cats._
-import scala.collection.compat._
-
-class NoOpDecrypter[F[_] : Applicative] extends KmsDecrypter[F] {
-  override def decrypt[A](transformer: Transform[A], cryptoText: A): F[Array[Byte]] =
-    Applicative[F].pure(transformer(cryptoText))
-
-  override def decrypt[A](transform: Transform[A], cryptoTexts: (String, A)*): Stream[F, Map[String, Array[Byte]]] =
-    Stream.emit(cryptoTexts.toMap.view.mapValues(transform).toMap)
-
-  override def decryptBase64(cryptoTexts: (String, String)*): Stream[F, Map[String, Array[Byte]]] =
-    Stream.emit(cryptoTexts.toMap.view.mapValues(base64DecodingTransform).toMap)
-}
diff --git a/test-kit/src/main/scala-2.13/com/dwolla/fs2aws/kms/NoOpDecrypter.scala b/test-kit/src/main/scala-2.13/com/dwolla/fs2aws/kms/NoOpDecrypter.scala
deleted file mode 100644
index 6c784d28..00000000
--- a/test-kit/src/main/scala-2.13/com/dwolla/fs2aws/kms/NoOpDecrypter.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.dwolla.fs2aws.kms
-
-import cats._
-import fs2._
-
-class NoOpDecrypter[F[_] : Applicative] extends KmsDecrypter[F] {
-  override def decrypt[A](transformer: Transform[A], cryptoText: A): F[Array[Byte]] =
-    Applicative[F].pure(transformer(cryptoText))
-
-  override def decrypt[A](transform: Transform[A], cryptoTexts: (String, A)*): Stream[F, Map[String, Array[Byte]]] =
-    Stream.emit(cryptoTexts.toMap.view.mapValues(transform).toMap)
-
-  override def decryptBase64(cryptoTexts: (String, String)*): Stream[F, Map[String, Array[Byte]]] =
-    Stream.emit(cryptoTexts.toMap.view.mapValues(base64DecodingTransform).toMap)
-}
diff --git a/test-kit/src/main/scala/com/dwolla/fs2aws/kms/ExceptionRaisingDecrypter.scala b/test-kit/src/main/scala/com/dwolla/fs2aws/kms/ExceptionRaisingDecrypter.scala
deleted file mode 100644
index 0d22d31b..00000000
--- a/test-kit/src/main/scala/com/dwolla/fs2aws/kms/ExceptionRaisingDecrypter.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.dwolla.fs2aws.kms
-
-import _root_.fs2._
-import cats._
-
-class ExceptionRaisingDecrypter[F[_]](throwable: Throwable)(implicit F: ApplicativeError[F, Throwable]) extends KmsDecrypter[F] {
-  override def decrypt[A](transformer: Transform[A], cryptoText: A): F[Array[Byte]] =
-    ApplicativeError[F, Throwable].raiseError(throwable)
-
-  override def decrypt[A](transform: Transform[A], cryptoTexts: (String, A)*): Stream[F, Map[String, Array[Byte]]] =
-    Stream.raiseError(throwable)
-
-  override def decryptBase64(cryptoTexts: (String, String)*): Stream[F, Map[String, Array[Byte]]] =
-    Stream.raiseError(throwable)
-}