This repository has been archived by the owner on Sep 16, 2023. It is now read-only.

docs: update classification sample to use v2 model (#899)
wizeng23 authored Sep 27, 2022
1 parent 5a41ee4 commit 7b3a5f8
Showing 1 changed file with 22 additions and 12 deletions.
34 changes: 22 additions & 12 deletions samples/snippets/src/main/java/com/example/language/Analyze.java
@@ -24,6 +24,9 @@
import com.google.cloud.language.v1.AnalyzeSyntaxRequest;
import com.google.cloud.language.v1.AnalyzeSyntaxResponse;
import com.google.cloud.language.v1.ClassificationCategory;
import com.google.cloud.language.v1.ClassificationModelOptions;
import com.google.cloud.language.v1.ClassificationModelOptions.V2Model;
import com.google.cloud.language.v1.ClassificationModelOptions.V2Model.ContentCategoriesVersion;
import com.google.cloud.language.v1.ClassifyTextRequest;
import com.google.cloud.language.v1.ClassifyTextResponse;
import com.google.cloud.language.v1.Document;
@@ -124,7 +127,7 @@ public static void analyzeEntitiesFile(String gcsUri) throws Exception {
// [START language_entities_gcs]
// Instantiate the Language client com.google.cloud.language.v1.LanguageServiceClient
try (LanguageServiceClient language = LanguageServiceClient.create()) {
// set the GCS Content URI path to the file to be analyzed
// Set the GCS Content URI path to the file to be analyzed
Document doc =
Document.newBuilder().setGcsContentUri(gcsUri).setType(Type.PLAIN_TEXT).build();
AnalyzeEntitiesRequest request =
@@ -203,9 +206,9 @@ public static List<Token> analyzeSyntaxText(String text) throws Exception {
.setDocument(doc)
.setEncodingType(EncodingType.UTF16)
.build();
// analyze the syntax in the given text
// Analyze the syntax in the given text
AnalyzeSyntaxResponse response = language.analyzeSyntax(request);
// print the response
// Print the response
for (Token token : response.getTokensList()) {
System.out.printf("\tText: %s\n", token.getText().getContent());
System.out.printf("\tBeginOffset: %d\n", token.getText().getBeginOffset());
@@ -243,9 +246,9 @@ public static List<Token> analyzeSyntaxFile(String gcsUri) throws Exception {
.setDocument(doc)
.setEncodingType(EncodingType.UTF16)
.build();
// analyze the syntax in the given text
// Analyze the syntax in the given text
AnalyzeSyntaxResponse response = language.analyzeSyntax(request);
// print the response
// Print the response
for (Token token : response.getTokensList()) {
System.out.printf("\tText: %s\n", token.getText().getContent());
System.out.printf("\tBeginOffset: %d\n", token.getText().getBeginOffset());
@@ -277,10 +280,17 @@ public static void classifyText(String text) throws Exception {
// [START language_classify_text]
// Instantiate the Language client com.google.cloud.language.v1.LanguageServiceClient
try (LanguageServiceClient language = LanguageServiceClient.create()) {
// set content to the text string
// Set content to the text string
Document doc = Document.newBuilder().setContent(text).setType(Type.PLAIN_TEXT).build();
ClassifyTextRequest request = ClassifyTextRequest.newBuilder().setDocument(doc).build();
// detect categories in the given text
V2Model v2Model =
V2Model.newBuilder().setContentCategoriesVersion(ContentCategoriesVersion.V2).build();
ClassificationModelOptions options =
ClassificationModelOptions.newBuilder().setV2Model(v2Model).build();
ClassifyTextRequest request =
ClassifyTextRequest.newBuilder()
.setDocument(doc)
.setClassificationModelOptions(options)
.build();
// Detect categories in the given text
ClassifyTextResponse response = language.classifyText(request);

for (ClassificationCategory category : response.getCategoriesList()) {
@@ -297,11 +307,11 @@ public static void classifyFile(String gcsUri) throws Exception {
// [START language_classify_gcs]
// Instantiate the Language client com.google.cloud.language.v1.LanguageServiceClient
try (LanguageServiceClient language = LanguageServiceClient.create()) {
// set the GCS content URI path
// Set the GCS content URI path
Document doc =
Document.newBuilder().setGcsContentUri(gcsUri).setType(Type.PLAIN_TEXT).build();
ClassifyTextRequest request = ClassifyTextRequest.newBuilder().setDocument(doc).build();
// detect categories in the given file
// Detect categories in the given file
ClassifyTextResponse response = language.classifyText(request);

for (ClassificationCategory category : response.getCategoriesList()) {
@@ -324,7 +334,7 @@ public static void entitySentimentText(String text) throws Exception {
.setDocument(doc)
.setEncodingType(EncodingType.UTF16)
.build();
// detect entity sentiments in the given string
// Detect entity sentiments in the given string
AnalyzeEntitySentimentResponse response = language.analyzeEntitySentiment(request);
// Print the response
for (Entity entity : response.getEntitiesList()) {
@@ -343,7 +353,7 @@ public static void entitySentimentText(String text) throws Exception {
// [END language_entity_sentiment_text]
}

/** Identifies the entity sentiments in the the GCS hosted file using the Language Beta API. */
/** Identifies the entity sentiments in the GCS hosted file using the Language Beta API. */
public static void entitySentimentFile(String gcsUri) throws Exception {
// [START language_entity_sentiment_gcs]
// Instantiate the Language client com.google.cloud.language.v1.LanguageServiceClient
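For context, a minimal standalone sketch of how the updated classifyText snippet fits together after this change. The class name ClassifyTextV2Example and the sample text are illustrative; the sketch assumes the google-cloud-language client library is on the classpath and Application Default Credentials are configured.

import com.google.cloud.language.v1.ClassificationCategory;
import com.google.cloud.language.v1.ClassificationModelOptions;
import com.google.cloud.language.v1.ClassificationModelOptions.V2Model;
import com.google.cloud.language.v1.ClassificationModelOptions.V2Model.ContentCategoriesVersion;
import com.google.cloud.language.v1.ClassifyTextRequest;
import com.google.cloud.language.v1.ClassifyTextResponse;
import com.google.cloud.language.v1.Document;
import com.google.cloud.language.v1.Document.Type;
import com.google.cloud.language.v1.LanguageServiceClient;

public class ClassifyTextV2Example {
  public static void main(String[] args) throws Exception {
    // Illustrative input; classification needs a reasonably long text string.
    String text =
        "Android is a mobile operating system developed by Google, "
            + "based on a modified version of the Linux kernel.";

    // The client is AutoCloseable, so try-with-resources shuts it down cleanly.
    try (LanguageServiceClient language = LanguageServiceClient.create()) {
      Document doc = Document.newBuilder().setContent(text).setType(Type.PLAIN_TEXT).build();

      // Request the v2 classification model with the V2 content categories taxonomy.
      V2Model v2Model =
          V2Model.newBuilder().setContentCategoriesVersion(ContentCategoriesVersion.V2).build();
      ClassificationModelOptions options =
          ClassificationModelOptions.newBuilder().setV2Model(v2Model).build();

      ClassifyTextRequest request =
          ClassifyTextRequest.newBuilder()
              .setDocument(doc)
              .setClassificationModelOptions(options)
              .build();

      // Detect categories in the given text and print them.
      ClassifyTextResponse response = language.classifyText(request);
      for (ClassificationCategory category : response.getCategoriesList()) {
        System.out.printf(
            "Category: %s, Confidence: %.3f%n", category.getName(), category.getConfidence());
      }
    }
  }
}

Building the request without setClassificationModelOptions, as the removed line did, leaves model selection to the service default.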
