[Storage] Run CI and live tests on Java 17 (#24492)
* does this work?

* try this.

* Revert "try this."

This reverts commit f157e60.

* does this help ?

* hmm?

* hungry?

* use java 17 in ci.

* fix at least nio.

* move it.

* fixes.

* disable these tests on java 17. CGLib doesn't work

* fix that.
kasobol-msft authored Oct 4, 2021
1 parent 5835f9c commit 7f4dd0e
Showing 12 changed files with 132 additions and 29 deletions.
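
The commit messages note that CGLib-backed Spock spies fail on Java 17, so the affected FileAPITest cases below are guarded with @IgnoreIf checks on the runtime feature version. A minimal illustrative Java sketch of that check (JavaVersionGuard is a hypothetical class name; only the standard Runtime.version() API is assumed):

// Illustrative sketch, not part of this commit: the same runtime check the
// @IgnoreIf guards below use to skip CGLib-backed Spy tests on Java 17.
public final class JavaVersionGuard {
    public static void main(String[] args) {
        int feature = Runtime.version().feature(); // 11 on Java 11, 17 on Java 17
        boolean skipCglibSpyTests = feature > 11;  // mirrors { Runtime.version().feature() > 11 }
        System.out.println("Java " + feature + "; skip CGLib Spy tests: " + skipCglibSpyTests);
    }
}
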
@@ -1246,22 +1246,27 @@ class EncyptedBlockBlobAPITest extends APISpec {
ebc.getBlobUrl().toString())
.buildEncryptedBlobAsyncClient()

def bacDownloading = getEncryptedClientBuilder(fakeKey, null, environment.primaryAccount.credential,
ebc.getBlobUrl().toString())
.addPolicy({ context, next ->
def localData = data
def policy = new HttpPipelinePolicy() {
@Override
Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return next.process()
.flatMap({ r ->
if (counter.incrementAndGet() == 1) {
/*
* When the download begins trigger an upload to overwrite the downloading blob
* so that the download is able to get an ETag before it is changed.
*/
return bacUploading.upload(data.defaultFlux, null, true)
return bacUploading.upload(localData.defaultFlux, null, true)
.thenReturn(r)
}
return Mono.just(r)
})
})
}
}
def bacDownloading = getEncryptedClientBuilder(fakeKey, null, environment.primaryAccount.credential,
ebc.getBlobUrl().toString())
.addPolicy(policy)
.buildEncryptedBlobAsyncClient()

/*
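
The same refactoring recurs throughout the test files changed in this commit: a Groovy closure that was being coerced to HttpPipelinePolicy is replaced by an explicit implementation of the interface. An illustrative Java sketch of such an explicit policy, not part of the diff (CountingPolicy is a hypothetical name):

import com.azure.core.http.HttpPipelineCallContext;
import com.azure.core.http.HttpPipelineNextPolicy;
import com.azure.core.http.HttpResponse;
import com.azure.core.http.policy.HttpPipelinePolicy;
import reactor.core.publisher.Mono;
import java.util.concurrent.atomic.AtomicInteger;

// Illustrative sketch only: an explicit HttpPipelinePolicy that counts responses,
// standing in for the closure-based policies the tests previously coerced.
final class CountingPolicy implements HttpPipelinePolicy {
    private final AtomicInteger counter = new AtomicInteger();

    @Override
    public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
        // Forward the request unchanged and bump the counter when the response arrives.
        return next.process().doOnNext(response -> counter.incrementAndGet());
    }

    int responseCount() {
        return counter.get();
    }
}
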
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-blob/pom.xml
@@ -36,7 +36,7 @@
<src.dir>src/main/java</src.dir>
<test.dir>src/test/java</test.dir>
<AZURE_TEST_SYSTEM_PLAYBACK_PROPERTIES_FILE>../azure-storage-common/ci.system.properties</AZURE_TEST_SYSTEM_PLAYBACK_PROPERTIES_FILE>
<surefireXmx>6g</surefireXmx>
<surefireXmx>8g</surefireXmx>
<!-- Configures the Java 9+ run to perform the required module exports, opens, and reads that are necessary for testing but shouldn't be part of the module-info. -->
<javaModulesSurefireArgLine>
--add-exports com.azure.core/com.azure.core.implementation.http=ALL-UNNAMED
@@ -3,8 +3,11 @@

package com.azure.storage.blob


import com.azure.core.http.HttpPipelineCallContext
import com.azure.core.http.HttpPipelineNextPolicy
import com.azure.core.http.HttpResponse
import com.azure.core.http.RequestConditions
import com.azure.core.http.policy.HttpPipelinePolicy
import com.azure.core.util.BinaryData
import com.azure.core.util.CoreUtils
import com.azure.core.util.polling.LongRunningOperationStatus
@@ -1151,22 +1154,26 @@ class BlobAPITest extends APISpec {
.credential(environment.primaryAccount.credential))
.buildAsyncClient()
.getBlockBlobAsyncClient()

def bacDownloading = instrument(new BlobClientBuilder()
.addPolicy({ context, next ->
def dataLocal = data
def policy = new HttpPipelinePolicy() {
@Override
Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return next.process()
.flatMap({ r ->
if (counter.incrementAndGet() == 1) {
/*
* When the download begins trigger an upload to overwrite the downloading blob
* so that the download is able to get an ETag before it is changed.
*/
return bacUploading.upload(data.defaultFlux, data.defaultDataSize, true)
.thenReturn(r)
return bacUploading.upload(dataLocal.defaultFlux, dataLocal.defaultDataSize, true)
.thenReturn(r)
}
return Mono.just(r)
})
})
}
}
def bacDownloading = instrument(new BlobClientBuilder()
.addPolicy(policy)
.endpoint(bc.getBlobUrl())
.credential(environment.primaryAccount.credential))
.buildAsyncClient()
@@ -1,5 +1,8 @@
package com.azure.storage.blob

import com.azure.core.http.HttpClient
import com.azure.core.http.HttpRequest
import com.azure.core.http.HttpResponse
import com.azure.storage.blob.models.BlobErrorCode
import com.azure.storage.blob.models.BlobStorageException
import com.azure.storage.blob.models.PageRange
@@ -93,12 +96,19 @@ class BlobOutputStreamTest extends APISpec {
def credentials = new StorageSharedKeyCredential("accountName", "accountKey")
def endpoint = "https://account.blob.core.windows.net/"
def data = getRandomByteArray(10 * Constants.MB)
def ex = (Exception) exception
def httpClient = new HttpClient() {
@Override
Mono<HttpResponse> send(HttpRequest httpRequest) {
return Mono.error(ex)
}
}
def blockBlobClient = new SpecializedBlobClientBuilder()
.endpoint(endpoint)
.containerName("container")
.blobName("blob")
.credential(credentials)
.httpClient({ httpRequest -> return Mono.error(exception) })
.httpClient(httpClient)
.buildBlockBlobClient()

when:
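
The BlobOutputStreamTest change above applies the same idea to HttpClient: the closure-based stub is replaced by an explicit implementation. A hedged standalone Java sketch of an equivalent stub, not part of the diff (FailingHttpClient is a hypothetical name):

import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpRequest;
import com.azure.core.http.HttpResponse;
import reactor.core.publisher.Mono;

// Illustrative sketch only: a stub HttpClient that fails every request with a
// configured exception, mirroring the anonymous class the test above switches to.
final class FailingHttpClient implements HttpClient {
    private final Exception exception;

    FailingHttpClient(Exception exception) {
        this.exception = exception;
    }

    @Override
    public Mono<HttpResponse> send(HttpRequest request) {
        // Never touches the network; every send simply surfaces the configured error.
        return Mono.error(exception);
    }
}
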
@@ -8,6 +8,8 @@ import com.azure.core.http.HttpMethod
import com.azure.core.http.HttpPipelineCallContext
import com.azure.core.http.HttpPipelineNextPolicy
import com.azure.core.http.HttpRequest
import com.azure.core.http.HttpResponse
import com.azure.core.http.policy.HttpPipelinePolicy
import com.azure.core.util.Context
import com.azure.core.util.FluxUtil
import com.azure.identity.DefaultAzureCredentialBuilder
@@ -1792,10 +1794,14 @@ class BlockBlobAPITest extends APISpec {
def mockHttpResponse = getStubResponse(500, new HttpRequest(HttpMethod.PUT, new URL("https://www.fake.com")))

// Mock a policy that will always then check that the data is still the same and return a retryable error.
def mockPolicy = { HttpPipelineCallContext context, HttpPipelineNextPolicy next ->
return collectBytesInBuffer(context.getHttpRequest().getBody())
.map({ it == data.defaultData })
.flatMap({ it ? Mono.just(mockHttpResponse) : Mono.error(new IllegalArgumentException()) })
def localData = data.defaultData
def mockPolicy = new HttpPipelinePolicy() {
@Override
Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return collectBytesInBuffer(context.getHttpRequest().getBody())
.map({ it == localData })
.flatMap({ it ? Mono.just(mockHttpResponse) : Mono.error(new IllegalArgumentException()) }) as Mono<HttpResponse>
}
}

// Build the pipeline
@@ -15,6 +15,7 @@ import com.azure.core.test.TestMode
import com.azure.core.util.ServiceVersion
import com.azure.core.util.logging.ClientLogger
import com.azure.identity.EnvironmentCredentialBuilder
import com.azure.storage.common.test.shared.policy.NoOpHttpPipelinePolicy
import okhttp3.ConnectionPool
import spock.lang.Specification

@@ -87,7 +88,7 @@ class StorageSpec extends Specification {
if (ENVIRONMENT.testMode == TestMode.RECORD) {
return interceptorManager.getRecordPolicy()
} else {
return { context, next -> return next.process() }
return NoOpHttpPipelinePolicy.INSTANCE
}
}

@@ -0,0 +1,23 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.storage.common.test.shared.policy;

import com.azure.core.http.HttpPipelineCallContext;
import com.azure.core.http.HttpPipelineNextPolicy;
import com.azure.core.http.HttpResponse;
import com.azure.core.http.policy.HttpPipelinePolicy;
import reactor.core.publisher.Mono;

public final class NoOpHttpPipelinePolicy implements HttpPipelinePolicy {

public static final HttpPipelinePolicy INSTANCE = new NoOpHttpPipelinePolicy();

private NoOpHttpPipelinePolicy() {
}

@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return next.process();
}
}
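
A hypothetical usage sketch for the new shared policy: the singleton drops into an azure-core pipeline wherever a record/playback policy is not needed, as StorageSpec.getRecordPolicy() does above for non-RECORD test modes. NoOpPolicyUsage is an illustrative name, and HttpPipelineBuilder is assumed to fall back to a default HttpClient when none is configured:

import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.storage.common.test.shared.policy.NoOpHttpPipelinePolicy;

// Illustrative usage, not part of this commit.
public final class NoOpPolicyUsage {
    public static HttpPipeline buildPipeline() {
        // Pipeline whose only policy is the no-op: requests pass straight through.
        return new HttpPipelineBuilder()
            .policies(NoOpHttpPipelinePolicy.INSTANCE)
            .build();
    }
}
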
@@ -4,6 +4,7 @@ import com.azure.core.http.HttpMethod
import com.azure.core.http.HttpPipelineCallContext
import com.azure.core.http.HttpPipelineNextPolicy
import com.azure.core.http.HttpRequest
import com.azure.core.http.HttpResponse
import com.azure.core.http.policy.HttpPipelinePolicy
import com.azure.core.http.rest.Response
import com.azure.core.util.Context
@@ -854,8 +855,12 @@ class DirectoryAPITest extends APISpec {
.setBatchSize(2)

// Mock a policy that will return an error on the call with the continuation token
HttpPipelinePolicy mockPolicy = { HttpPipelineCallContext context, HttpPipelineNextPolicy next ->
return context.getHttpRequest().getUrl().toString().contains("continuation") ? Mono.error(error) : next.process()
def localError = error
HttpPipelinePolicy mockPolicy = new HttpPipelinePolicy() {
@Override
Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return context.getHttpRequest().getUrl().toString().contains("continuation") ? Mono.error(localError) : next.process() as Mono<HttpResponse>
}
}

dc = getDirectoryClient(environment.dataLakeAccount.credential, dc.getDirectoryUrl(), dc.getObjectPath(), mockPolicy)
@@ -1261,8 +1266,12 @@ class DirectoryAPITest extends APISpec {
.setBatchSize(2)

// Mock a policy that will return an error on the call with the continuation token
HttpPipelinePolicy mockPolicy = { HttpPipelineCallContext context, HttpPipelineNextPolicy next ->
return context.getHttpRequest().getUrl().toString().contains("continuation") ? Mono.error(error) : next.process()
def localError = error
HttpPipelinePolicy mockPolicy = new HttpPipelinePolicy() {
@Override
Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return context.getHttpRequest().getUrl().toString().contains("continuation") ? Mono.error(localError) : next.process() as Mono<HttpResponse>
}
}

dc = getDirectoryClient(environment.dataLakeAccount.credential, dc.getDirectoryUrl(), dc.getObjectPath(), mockPolicy)
@@ -1667,8 +1676,12 @@ class DirectoryAPITest extends APISpec {
.setBatchSize(2)

// Mock a policy that will return an error on the call with the continuation token
HttpPipelinePolicy mockPolicy = { HttpPipelineCallContext context, HttpPipelineNextPolicy next ->
return context.getHttpRequest().getUrl().toString().contains("continuation") ? Mono.error(error) : next.process()
def localError = error
HttpPipelinePolicy mockPolicy = new HttpPipelinePolicy() {
@Override
Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return context.getHttpRequest().getUrl().toString().contains("continuation") ? Mono.error(localError) : next.process() as Mono<HttpResponse>
}
}

dc = getDirectoryClient(environment.dataLakeAccount.credential, dc.getDirectoryUrl(), dc.getObjectPath(), mockPolicy)
@@ -1,6 +1,10 @@
package com.azure.storage.file.datalake

import com.azure.core.exception.UnexpectedLengthException
import com.azure.core.http.HttpPipelineCallContext
import com.azure.core.http.HttpPipelineNextPolicy
import com.azure.core.http.HttpResponse
import com.azure.core.http.policy.HttpPipelinePolicy
import com.azure.core.test.TestMode
import com.azure.core.util.Context
import com.azure.core.util.FluxUtil
@@ -1515,21 +1519,26 @@ class FileAPITest extends APISpec {
.credential(environment.dataLakeAccount.credential))
.buildFileAsyncClient()

def facDownloading = instrument(new DataLakePathClientBuilder()
.addPolicy({ context, next ->
def localData = data
def policy = new HttpPipelinePolicy() {
@Override
Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
return next.process()
.flatMap({ r ->
if (counter.incrementAndGet() == 1) {
/*
* When the download begins trigger an upload to overwrite the downloading blob
* so that the download is able to get an ETag before it is changed.
*/
return facUploading.upload(data.defaultFlux, null, true)
return facUploading.upload(localData.defaultFlux, null, true)
.thenReturn(r)
}
return Mono.just(r)
})
})
}
}
def facDownloading = instrument(new DataLakePathClientBuilder()
.addPolicy(policy)
.endpoint(fc.getPathUrl())
.credential(environment.dataLakeAccount.credential))
.buildFileAsyncClient()
@@ -2658,6 +2667,8 @@ class FileAPITest extends APISpec {
"foo" | "bar" | "fizz" | "buzz"
}

// TODO https://github.com/cglib/cglib/issues/191 CGLib used to generate Spy doesn't work in Java 17
@IgnoreIf( { Runtime.version().feature() > 11 } )
@Unroll
@LiveOnly
def "Buffered upload options"() {
@@ -3921,6 +3932,8 @@ class FileAPITest extends APISpec {
}

/* Due to the inability to spy on a private method, we are just calling the async client with the input stream constructor */
// TODO https://github.com/cglib/cglib/issues/191 CGLib used to generate Spy doesn't work in Java 17
@IgnoreIf( { Runtime.version().feature() > 11 } )
@Unroll
@LiveOnly /* Flaky in playback. */
def "Upload numAppends"() {
6 changes: 6 additions & 0 deletions sdk/storage/ci.yml
@@ -107,3 +107,9 @@ extends:
AdditionalModules:
- name: azure-storage-perf
groupId: com.azure
MatrixConfigs:
- Name: Storage_ci
Path: sdk/storage/platform-matrix-ci.json
Selection: sparse
NonSparseParameters: Agent
GenerateVMJobs: true
14 changes: 14 additions & 0 deletions sdk/storage/platform-matrix-ci.json
@@ -0,0 +1,14 @@
{
"matrix": {
"$IMPORT": "eng/pipelines/templates/stages/platform-matrix.json"
},
"include": [
{
"Agent": { "windows-2019": { "OSVmImage": "MMS2019", "Pool": "azsdk-pool-mms-win-2019-general" } },
"JavaTestVersion": "1.17",
"AZURE_TEST_HTTP_CLIENTS": "netty",
"TestGoals": "surefire:test",
"TestOptions": ""
}
]
}
5 changes: 5 additions & 0 deletions sdk/storage/platform-matrix.json
@@ -16,6 +16,11 @@
"JavaTestVersion": "1.11",
"AZURE_TEST_HTTP_CLIENTS": "okhttp"
},
{
"Agent": { "windows-2019": { "OSVmImage": "MMS2019", "Pool": "azsdk-pool-mms-win-2019-general" } },
"JavaTestVersion": "1.17",
"AZURE_TEST_HTTP_CLIENTS": "netty"
},
{
"Agent": {
"ubuntu-20.04": { "OSVmImage": "MMSUbuntu20.04", "Pool": "azsdk-pool-mms-ubuntu-2004-general" }