[TA] CodeGen based on 3.1-preview.3 (#17182)
- Only codegen and fixes after codegen; there is no new implementation added (the recurring call-site change is sketched below the file summary).
mssfang authored Nov 4, 2020
1 parent 21bd526 commit c704f38
Showing 220 changed files with 3,971 additions and 1,024 deletions.
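The recurring change in the client hunks below is that the regenerated *WithResponseAsync service methods now take the Context argument directly after the batch input instead of as the trailing parameter, so every call site is reordered without changing behavior. A condensed before/after sketch of that call-site pattern, excerpted from the key-phrases hunk (not a standalone program: service, documents, options, and context come from the surrounding async client code; the trailing reactive operator chain is omitted and semicolons are added for the excerpt):

    // Before regeneration: Context was the trailing argument.
    service.keyPhrasesWithResponseAsync(
        new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
        options == null ? null : options.getModelVersion(),
        options == null ? null : options.isIncludeStatistics(),
        context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE));

    // After regeneration: Context follows the batch input, then modelVersion and includeStatistics.
    service.keyPhrasesWithResponseAsync(
        new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
        context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
        options == null ? null : options.getModelVersion(),
        options == null ? null : options.isIncludeStatistics());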
@@ -215,8 +215,9 @@ private Mono<Response<AnalyzeSentimentResultCollection>> getAnalyzedSentimentRes
}
return service.sentimentWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
- modelVersion, includeStatistics, includeOpinionMining, StringIndexType.UTF16CODE_UNIT,
- context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
+ context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
+ modelVersion, includeStatistics, includeOpinionMining, StringIndexType.UTF16CODE_UNIT
+ )
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Analyzed sentiment for a batch of documents - {}", response))
.doOnError(error -> logger.warning("Failed to analyze sentiment - {}", error))
@@ -150,15 +150,15 @@ private Response<DetectLanguageResultCollection> toTextAnalyticsResultDocumentRe
private Mono<Response<DetectLanguageResultCollection>> getDetectedLanguageResponse(
Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.languagesWithResponseAsync(
- new LanguageBatchInput().setDocuments(toLanguageInput(documents)),
- options == null ? null : options.getModelVersion(),
- options == null ? null : options.isIncludeStatistics(),
- context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
- .doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
- .doOnSuccess(response -> logger.info("Detected languages for a batch of documents - {}",
- response.getValue()))
- .doOnError(error -> logger.warning("Failed to detect language - {}", error))
- .map(this::toTextAnalyticsResultDocumentResponse)
- .onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable));
+ new LanguageBatchInput().setDocuments(toLanguageInput(documents)),
+ context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
+ options == null ? null : options.getModelVersion(),
+ options == null ? null : options.isIncludeStatistics())
+ .doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
+ .doOnSuccess(response -> logger.info("Detected languages for a batch of documents - {}",
+ response.getValue()))
+ .doOnError(error -> logger.warning("Failed to detect language - {}", error))
+ .map(this::toTextAnalyticsResultDocumentResponse)
+ .onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable));
}
}
@@ -182,9 +182,9 @@ private Mono<Response<ExtractKeyPhrasesResultCollection>> getExtractedKeyPhrases
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.keyPhrasesWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
+ context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
- options == null ? null : options.isIncludeStatistics(),
- context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
+ options == null ? null : options.isIncludeStatistics())
.doOnSubscribe(ignoredValue -> logger.info("A batch of document - {}", documents.toString()))
.doOnSuccess(response -> logger.info("A batch of key phrases output - {}", response.getValue()))
.doOnError(error -> logger.warning("Failed to extract key phrases - {}", error))
@@ -187,10 +187,10 @@ private Mono<Response<RecognizeEntitiesResultCollection>> getRecognizedEntitiesR
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.entitiesRecognitionGeneralWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
+ context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
- StringIndexType.UTF16CODE_UNIT,
- context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
+ StringIndexType.UTF16CODE_UNIT)
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Recognized entities for a batch of documents- {}",
response.getValue()))
@@ -201,10 +201,10 @@ private IterableStream<LinkedEntity> mapLinkedEntity(
Context context) {
return service.entitiesLinkingWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
+ context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
- StringIndexType.UTF16CODE_UNIT,
- context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
+ StringIndexType.UTF16CODE_UNIT)
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Recognized linked entities for a batch of documents - {}",
response.getValue()))
@@ -7,7 +7,7 @@
import com.azure.ai.textanalytics.implementation.models.DocumentError;
import com.azure.ai.textanalytics.implementation.models.EntitiesResult;
import com.azure.ai.textanalytics.implementation.models.MultiLanguageBatchInput;
- import com.azure.ai.textanalytics.implementation.models.PiiEntitiesResult;
+ import com.azure.ai.textanalytics.implementation.models.PiiResult;
import com.azure.ai.textanalytics.implementation.models.StringIndexType;
import com.azure.ai.textanalytics.implementation.models.WarningCodeValue;
import com.azure.ai.textanalytics.models.EntityCategory;
@@ -145,8 +145,8 @@ Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatchWi
* @return A {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
private Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse(
- final Response<PiiEntitiesResult> response) {
- final PiiEntitiesResult piiEntitiesResult = response.getValue();
+ final Response<PiiResult> response) {
+ final PiiResult piiEntitiesResult = response.getValue();
// List of documents results
final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
piiEntitiesResult.getDocuments().forEach(documentEntities -> {
@@ -211,8 +211,11 @@ private Mono<Response<RecognizePiiEntitiesResultCollection>> getRecognizePiiEnti
}
return service.entitiesRecognitionPiiWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
- modelVersion, includeStatistics, domainFilter, StringIndexType.UTF16CODE_UNIT,
- context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
+ context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
+ modelVersion,
+ includeStatistics,
+ domainFilter,
+ StringIndexType.UTF16CODE_UNIT)
.doOnSubscribe(ignoredValue -> logger.info(
"Start recognizing Personally Identifiable Information entities for a batch of documents."))
.doOnSuccess(response -> logger.info(
@@ -0,0 +1,67 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.ai.textanalytics.implementation;

import com.azure.core.annotation.ServiceClientBuilder;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.policy.CookiePolicy;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.policy.UserAgentPolicy;

/**
* A builder for creating a new instance of the TextAnalyticsClient type.
*/
@ServiceClientBuilder(serviceClients = {TextAnalyticsClientImpl.class})
public final class TextAnalyticsClientBuilder {
/*
* Supported Cognitive Services endpoints (protocol and hostname, for
* example: https://westus.api.cognitive.microsoft.com).
*/
private String endpoint;

/**
* Sets Supported Cognitive Services endpoints (protocol and hostname, for example:
* https://westus.api.cognitive.microsoft.com).
*
* @param endpoint the endpoint value.
* @return the TextAnalyticsClientBuilder.
*/
public TextAnalyticsClientBuilder endpoint(String endpoint) {
this.endpoint = endpoint;
return this;
}

/*
* The HTTP pipeline to send requests through
*/
private HttpPipeline pipeline;

/**
* Sets The HTTP pipeline to send requests through.
*
* @param pipeline the pipeline value.
* @return the TextAnalyticsClientBuilder.
*/
public TextAnalyticsClientBuilder pipeline(HttpPipeline pipeline) {
this.pipeline = pipeline;
return this;
}

/**
* Builds an instance of TextAnalyticsClientImpl with the provided parameters.
*
* @return an instance of TextAnalyticsClientImpl.
*/
public TextAnalyticsClientImpl buildClient() {
if (pipeline == null) {
this.pipeline = new HttpPipelineBuilder().policies(new UserAgentPolicy(), new RetryPolicy(),
new CookiePolicy()).build();
}
TextAnalyticsClientImpl client = new TextAnalyticsClientImpl(pipeline);
client.setEndpoint(this.endpoint);
return client;
}
}
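A minimal usage sketch for the generated builder, assuming only what the class above exposes; the sample class name is hypothetical, the endpoint is the example value from the javadoc, and no authentication policy is configured (buildClient() falls back to the default UserAgentPolicy/RetryPolicy/CookiePolicy pipeline when pipeline(...) is not called):

    import com.azure.ai.textanalytics.implementation.TextAnalyticsClientBuilder;
    import com.azure.ai.textanalytics.implementation.TextAnalyticsClientImpl;

    public final class TextAnalyticsClientBuilderSample {
        public static void main(String[] args) {
            // Build the generated implementation client against the example endpoint;
            // the builder supplies the default pipeline because none is set here.
            TextAnalyticsClientImpl client = new TextAnalyticsClientBuilder()
                .endpoint("https://westus.api.cognitive.microsoft.com")
                .buildClient();
            System.out.println("Created TextAnalyticsClientImpl for the configured endpoint.");
        }
    }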