Skip to content

Commit

Permalink
Regenerated Synapse Spark SDK from tag package-spark-2020-12-01 (#24510)
Browse files Browse the repository at this point in the history
  • Loading branch information
g2vinay authored Oct 11, 2021
1 parent 107e0bc commit 744db07
Show file tree
Hide file tree
Showing 13 changed files with 453 additions and 1,194 deletions.
8 changes: 7 additions & 1 deletion sdk/synapse/azure-analytics-synapse-spark/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,13 @@
# Release History

## 1.0.0-beta.5 (Unreleased)
## 1.0.0-beta.5 (2021-11-10)

### New Features
- Added `ClientOptions` to `SparkClientBuilder`.
- Added `LivyStatementStates` and `LivyStates` enum classes.

### Other Changes
- Re-generated with latest release tag `package-spark-2020-12-01`.

## 1.0.0-beta.4 (2021-08-10)
- Update to 2020-12-01 API version
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,10 @@
import com.azure.core.annotation.ServiceClientBuilder;
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpHeaders;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.policy.AddHeadersPolicy;
import com.azure.core.http.policy.BearerTokenAuthenticationPolicy;
import com.azure.core.http.policy.CookiePolicy;
import com.azure.core.http.policy.HttpLogOptions;
Expand All @@ -18,7 +20,9 @@
import com.azure.core.http.policy.HttpPolicyProviders;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.policy.UserAgentPolicy;
import com.azure.core.util.ClientOptions;
import com.azure.core.util.Configuration;
import com.azure.core.util.CoreUtils;
import com.azure.core.util.serializer.JacksonAdapter;
import com.azure.core.util.serializer.SerializerAdapter;
import java.util.ArrayList;
Expand Down Expand Up @@ -216,6 +220,23 @@ public SparkClientBuilder retryPolicy(RetryPolicy retryPolicy) {
*/
private final List<HttpPipelinePolicy> pipelinePolicies;

/*
 * The client options such as application ID and custom headers to set on a
 * request. If left unset, a default ClientOptions is created when the HTTP
 * pipeline is built (see createHttpPipeline).
 */
private ClientOptions clientOptions;

/**
 * Sets the client options such as application ID and custom headers to set on a request.
 *
 * <p>The application ID participates in User-Agent construction and any configured headers are
 * added to every request via an AddHeadersPolicy when the pipeline is built.
 *
 * @param clientOptions the clientOptions value. May be {@code null}; a default instance is
 *     substituted at pipeline-build time.
 * @return the SparkClientBuilder.
 */
public SparkClientBuilder clientOptions(ClientOptions clientOptions) {
    this.clientOptions = clientOptions;
    return this;
}

/**
* Adds a custom Http pipeline policy.
*
Expand All @@ -233,9 +254,6 @@ public SparkClientBuilder addPolicy(HttpPipelinePolicy customPolicy) {
* @return an instance of SparkClientImpl.
*/
private SparkClientImpl buildInnerClient() {
if (livyApiVersion == null) {
this.livyApiVersion = "2019-11-01-preview";
}
if (pipeline == null) {
this.pipeline = createHttpPipeline();
}
Expand All @@ -253,11 +271,19 @@ private HttpPipeline createHttpPipeline() {
if (httpLogOptions == null) {
httpLogOptions = new HttpLogOptions();
}
if (clientOptions == null) {
clientOptions = new ClientOptions();
}
List<HttpPipelinePolicy> policies = new ArrayList<>();
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
policies.add(
new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion, buildConfiguration));
String applicationId = CoreUtils.getApplicationId(clientOptions, httpLogOptions);
policies.add(new UserAgentPolicy(applicationId, clientName, clientVersion, buildConfiguration));
HttpHeaders headers = new HttpHeaders();
clientOptions.getHeaders().forEach(header -> headers.set(header.getName(), header.getValue()));
if (headers.getSize() > 0) {
policies.add(new AddHeadersPolicy(headers));
}
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(retryPolicy == null ? new RetryPolicy() : retryPolicy);
policies.add(new CookiePolicy());
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.analytics.synapse.spark.models;

import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;

/**
 * Expandable enumeration of known {@code LivyStatementStates} string values.
 *
 * <p>Unrecognized values returned by the service are preserved rather than rejected.
 */
public final class LivyStatementStates extends ExpandableStringEnum<LivyStatementStates> {
    /** The statement is waiting to run. Wire value: {@code "waiting"}. */
    public static final LivyStatementStates WAITING = fromString("waiting");

    /** The statement is currently running. Wire value: {@code "running"}. */
    public static final LivyStatementStates RUNNING = fromString("running");

    /** The statement has completed and its result is available. Wire value: {@code "available"}. */
    public static final LivyStatementStates AVAILABLE = fromString("available");

    /** The statement failed. Wire value: {@code "error"}. */
    public static final LivyStatementStates ERROR = fromString("error");

    /** The statement is being cancelled. Wire value: {@code "cancelling"}. */
    public static final LivyStatementStates CANCELLING = fromString("cancelling");

    /** The statement was cancelled. Wire value: {@code "cancelled"}. */
    public static final LivyStatementStates CANCELLED = fromString("cancelled");

    /**
     * Returns the collection of {@code LivyStatementStates} values known to this client.
     *
     * @return known LivyStatementStates values.
     */
    public static Collection<LivyStatementStates> values() {
        return values(LivyStatementStates.class);
    }

    /**
     * Creates or finds a {@code LivyStatementStates} from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding LivyStatementStates.
     */
    @JsonCreator
    public static LivyStatementStates fromString(String name) {
        return fromString(name, LivyStatementStates.class);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.analytics.synapse.spark.models;

import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;

/**
 * Expandable enumeration of known {@code LivyStates} string values.
 *
 * <p>Unrecognized values returned by the service are preserved rather than rejected.
 */
public final class LivyStates extends ExpandableStringEnum<LivyStates> {
    /** Wire value {@code "not_started"}. */
    public static final LivyStates NOT_STARTED = fromString("not_started");

    /** Wire value {@code "starting"}. */
    public static final LivyStates STARTING = fromString("starting");

    /** Wire value {@code "idle"}. */
    public static final LivyStates IDLE = fromString("idle");

    /** Wire value {@code "busy"}. */
    public static final LivyStates BUSY = fromString("busy");

    /** Wire value {@code "shutting_down"}. */
    public static final LivyStates SHUTTING_DOWN = fromString("shutting_down");

    /** Wire value {@code "error"}. */
    public static final LivyStates ERROR = fromString("error");

    /** Wire value {@code "dead"}. */
    public static final LivyStates DEAD = fromString("dead");

    /** Wire value {@code "killed"}. */
    public static final LivyStates KILLED = fromString("killed");

    /** Wire value {@code "success"}. */
    public static final LivyStates SUCCESS = fromString("success");

    /** Wire value {@code "running"}. */
    public static final LivyStates RUNNING = fromString("running");

    /** Wire value {@code "recovering"}. */
    public static final LivyStates RECOVERING = fromString("recovering");

    /**
     * Returns the collection of {@code LivyStates} values known to this client.
     *
     * @return known LivyStates values.
     */
    public static Collection<LivyStates> values() {
        return values(LivyStates.class);
    }

    /**
     * Creates or finds a {@code LivyStates} from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding LivyStates.
     */
    @JsonCreator
    public static LivyStates fromString(String name) {
        return fromString(name, LivyStates.class);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ public final class SparkBatchJob {
* The batch state
*/
@JsonProperty(value = "state")
private String state;
private LivyStates state;

/*
* The log lines.
Expand Down Expand Up @@ -445,7 +445,7 @@ public SparkBatchJob setAppInfo(Map<String, String> appInfo) {
*
* @return the state value.
*/
public String getState() {
public LivyStates getState() {
return this.state;
}

Expand All @@ -455,7 +455,7 @@ public String getState() {
* @param state the state value to set.
* @return the SparkBatchJob object itself.
*/
public SparkBatchJob setState(String state) {
public SparkBatchJob setState(LivyStates state) {
this.state = state;
return this;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,10 +109,10 @@ public final class SparkSession {
private Map<String, String> appInfo;

/*
* The state property.
* The session state.
*/
@JsonProperty(value = "state")
private String state;
private LivyStates state;

/*
* The log property.
Expand Down Expand Up @@ -441,21 +441,21 @@ public SparkSession setAppInfo(Map<String, String> appInfo) {
}

/**
* Get the state property: The state property.
* Get the state property: The session state.
*
* @return the state value.
*/
public String getState() {
public LivyStates getState() {
return this.state;
}

/**
* Set the state property: The state property.
* Set the state property: The session state.
*
* @param state the state value to set.
* @return the SparkSession object itself.
*/
public SparkSession setState(String state) {
public SparkSession setState(LivyStates state) {
this.state = state;
return this;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ public final class SparkStatement {
* The state property.
*/
@JsonProperty(value = "state")
private String state;
private LivyStatementStates state;

/*
* The output property.
Expand Down Expand Up @@ -79,7 +79,7 @@ public SparkStatement setCode(String code) {
*
* @return the state value.
*/
public String getState() {
public LivyStatementStates getState() {
return this.state;
}

Expand All @@ -89,7 +89,7 @@ public String getState() {
* @param state the state value to set.
* @return the SparkStatement object itself.
*/
public SparkStatement setState(String state) {
public SparkStatement setState(LivyStatementStates state) {
this.state = state;
return this;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ protected void beforeTest() {
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.sparkPoolName(getSparkPoolName())
.livyApiVersion(livyApiVersion)
.buildSparkBatchClient());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ public abstract class SparkClientTestBase extends TestBase {
private final String clientName = properties.getOrDefault(NAME, "UnknownName");
private final String clientVersion = properties.getOrDefault(VERSION, "UnknownVersion");
private final String fakeSparkPool = "testsparkpool";
final String livyApiVersion = "2019-11-01-preview";

protected String getEndpoint() {
String endpoint = interceptorManager.isPlaybackMode()
Expand All @@ -55,9 +56,9 @@ <T> T clientSetup(Function<HttpPipeline, T> clientBuilder) {
TokenCredential credential = null;

if (!interceptorManager.isPlaybackMode()) {
String clientId = System.getenv("CLIENT_ID");
String clientKey = System.getenv("CLIENT_SECRET");
String tenantId = System.getenv("TENANT_ID");
String clientId = System.getenv("AZURE_CLIENT_ID");
String clientKey = System.getenv("AZURE_CLIENT_SECRET");
String tenantId = System.getenv("AZURE_TENANT_ID");
Objects.requireNonNull(clientId, "The client id cannot be null");
Objects.requireNonNull(clientKey, "The client key cannot be null");
Objects.requireNonNull(tenantId, "The tenant id cannot be null");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ protected void beforeTest() {
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.sparkPoolName(getSparkPoolName())
.livyApiVersion(livyApiVersion)
.buildSparkSessionClient());
}

Expand Down

Large diffs are not rendered by default.

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions sdk/synapse/azure-analytics-synapse-spark/swagger/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,11 @@ autorest --java --use=C:/work/autorest.java

### Code generation settings
```yaml
branch: 3d6211cf28f83236cdf78e7cfc50efd3fb7cba72
branch: bee724836ffdeb5458274037dc75f4d43576b5e3
repo: https://github.com/Azure/azure-rest-api-specs/blob/$(branch)
```
```yaml
input-file:
- $(repo)/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/sparkJob.json
java: true
output-folder: ..\
generate-client-as-impl: true
Expand All @@ -41,6 +39,8 @@ context-client-method-parameter: true
required-parameter-client-methods: true
credential-types: tokencredential
credential-scopes: https://dev.azuresynapse.net/.default
require: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/$(branch)/specification/synapse/data-plane/readme.md
tag: package-spark-2020-12-01
```
### Add x-ms-client-default to livyApiVersion
Expand Down

0 comments on commit 744db07

Please sign in to comment.