diff --git a/s3/src/test/scala/akka/stream/alpakka/s3/impl/DiskBufferSpec.scala b/s3/src/test/scala/akka/stream/alpakka/s3/impl/DiskBufferSpec.scala
index 8e6cf30117..9efb130dab 100644
--- a/s3/src/test/scala/akka/stream/alpakka/s3/impl/DiskBufferSpec.scala
+++ b/s3/src/test/scala/akka/stream/alpakka/s3/impl/DiskBufferSpec.scala
@@ -43,8 +43,12 @@ class DiskBufferSpec(_system: ActorSystem)
 
     result should have size (1)
     val chunk = result.head
+    chunk shouldBe a[DiskChunk]
+    val diskChunk = chunk.asInstanceOf[DiskChunk]
     chunk.size should be(14)
-    chunk.data.runWith(Sink.seq).futureValue should be(Seq(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)))
+    diskChunk.data.runWith(Sink.seq).futureValue should be(
+      Seq(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))
+    )
   }
 
   it should "fail if more than maxSize bytes are fed into it" in {
@@ -63,12 +67,14 @@ class DiskBufferSpec(_system: ActorSystem)
   it should "delete its temp file after N materializations" in {
     val tmpDir = Files.createTempDirectory("DiskBufferSpec").toFile()
     val before = tmpDir.list().size
-    val source = Source(Vector(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)))
+    val chunk = Source(Vector(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)))
       .via(new DiskBuffer(2, 200, Some(tmpDir.toPath)))
       .runWith(Sink.seq)
       .futureValue
       .head
-      .data
+
+    chunk shouldBe a[DiskChunk]
+    val source = chunk.asInstanceOf[DiskChunk].data
 
     tmpDir.list().size should be(before + 1)
 
diff --git a/s3/src/test/scala/akka/stream/alpakka/s3/impl/HttpRequestsSpec.scala b/s3/src/test/scala/akka/stream/alpakka/s3/impl/HttpRequestsSpec.scala
index dab45f65ab..f4e0e3b950 100644
--- a/s3/src/test/scala/akka/stream/alpakka/s3/impl/HttpRequestsSpec.scala
+++ b/s3/src/test/scala/akka/stream/alpakka/s3/impl/HttpRequestsSpec.scala
@@ -14,8 +14,8 @@ import akka.http.scaladsl.model.headers.{`Raw-Request-URI`, ByteRange, RawHeader
 import akka.stream.alpakka.s3.headers.{CannedAcl, ServerSideEncryption, StorageClass}
 import akka.stream.alpakka.s3._
 import akka.stream.alpakka.testkit.scaladsl.LogCapturing
-import akka.stream.scaladsl.Source
 import akka.testkit.{SocketUtil, TestKit, TestProbe}
+import akka.util.ByteString
 import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
@@ -272,7 +272,7 @@ class HttpRequestsSpec extends AnyFlatSpec with Matchers with ScalaFutures with
     implicit val settings = getSettings(s3Region = Region.EU_WEST_1).withEndpointUrl("http://localhost:8080")
 
     val req =
-      HttpRequests.uploadPartRequest(multipartUpload, 1, Source.empty, 1)
+      HttpRequests.uploadPartRequest(multipartUpload, 1, MemoryChunk(ByteString.empty))
 
     req.uri.scheme shouldEqual "http"
     req.uri.authority.host.address shouldEqual "localhost"
@@ -284,7 +284,7 @@ class HttpRequestsSpec extends AnyFlatSpec with Matchers with ScalaFutures with
     val myKey = "my-key"
     val md5Key = "md5-key"
     val s3Headers = ServerSideEncryption.customerKeys(myKey).withMd5(md5Key).headersFor(UploadPart)
-    val req = HttpRequests.uploadPartRequest(multipartUpload, 1, Source.empty, 1, s3Headers)
+    val req = HttpRequests.uploadPartRequest(multipartUpload, 1, MemoryChunk(ByteString.empty), s3Headers)
 
     req.headers should contain(RawHeader("x-amz-server-side-encryption-customer-algorithm", "AES256"))
     req.headers should contain(RawHeader("x-amz-server-side-encryption-customer-key", myKey))
diff --git a/s3/src/test/scala/akka/stream/alpakka/s3/impl/MemoryBufferSpec.scala b/s3/src/test/scala/akka/stream/alpakka/s3/impl/MemoryBufferSpec.scala
index c3ba9b3185..849560aeb1 100644
--- a/s3/src/test/scala/akka/stream/alpakka/s3/impl/MemoryBufferSpec.scala
+++ b/s3/src/test/scala/akka/stream/alpakka/s3/impl/MemoryBufferSpec.scala
@@ -39,7 +39,9 @@ class MemoryBufferSpec(_system: ActorSystem)
     result should have size (1)
     val chunk = result.head
     chunk.size should be(14)
-    chunk.data.runWith(Sink.seq).futureValue should be(Seq(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)))
+
+    chunk shouldBe a[MemoryChunk]
+    chunk.asInstanceOf[MemoryChunk].data should be(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))
   }
 
   it should "fail if more than maxSize bytes are fed into it" in {