Skip to content

Commit

Permalink
Fix failing tests: (#1599)
Browse files Browse the repository at this point in the history
* Update samples to use try for clients and update model IDs due to API backend changes

* Update to latest library to fix timeout
  • Loading branch information
nnegrey authored and bradmiro committed Nov 15, 2019
1 parent d23f4e4 commit 799368d
Show file tree
Hide file tree
Showing 7 changed files with 307 additions and 294 deletions.
2 changes: 1 addition & 1 deletion language/automl/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-automl</artifactId>
<version>0.55.1-beta</version>
<version>0.114.0-beta</version>
</dependency>
<!-- [END automl_language_java_dependencies] -->
<dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@

import java.io.IOException;
import java.io.PrintStream;
import java.util.Arrays;

import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
Expand Down Expand Up @@ -62,76 +63,33 @@ public static void createDataset(
String projectId, String computeRegion, String datasetName, Boolean multiLabel)
throws IOException {
// Instantiates a client
AutoMlClient client = AutoMlClient.create();

// A resource that represents Google Cloud Platform location.
LocationName projectLocation = LocationName.of(projectId, computeRegion);

// Classification type assigned based on multilabel value.
ClassificationType classificationType =
multiLabel ? ClassificationType.MULTILABEL : ClassificationType.MULTICLASS;

// Specify the text classification type for the dataset.
TextClassificationDatasetMetadata textClassificationDatasetMetadata =
TextClassificationDatasetMetadata.newBuilder()
.setClassificationType(classificationType)
.build();

// Set dataset name and dataset metadata.
Dataset myDataset =
Dataset.newBuilder()
.setDisplayName(datasetName)
.setTextClassificationDatasetMetadata(textClassificationDatasetMetadata)
.build();

// Create a dataset with the dataset metadata in the region.
Dataset dataset = client.createDataset(projectLocation, myDataset);

// Display the dataset information.
System.out.println(String.format("Dataset name: %s", dataset.getName()));
System.out.println(
String.format(
"Dataset id: %s",
dataset.getName().split("/")[dataset.getName().split("/").length - 1]));
System.out.println(String.format("Dataset display name: %s", dataset.getDisplayName()));
System.out.println("Text classification dataset metadata:");
System.out.print(String.format("\t%s", dataset.getTextClassificationDatasetMetadata()));
System.out.println(String.format("Dataset example count: %d", dataset.getExampleCount()));
System.out.println("Dataset create time:");
System.out.println(String.format("\tseconds: %s", dataset.getCreateTime().getSeconds()));
System.out.println(String.format("\tnanos: %s", dataset.getCreateTime().getNanos()));
}
// [END automl_language_create_dataset]
try (AutoMlClient client = AutoMlClient.create()) {

// [START automl_language_list_datasets]
/**
* Demonstrates using the AutoML client to list all datasets.
*
* @param projectId the Id of the project.
* @param computeRegion the Region name.
* @param filter the Filter expression.
* @throws IOException on Input/Output errors.
*/
public static void listDatasets(String projectId, String computeRegion, String filter)
throws IOException {
// Instantiates a client
AutoMlClient client = AutoMlClient.create();
// A resource that represents Google Cloud Platform location.
LocationName projectLocation = LocationName.of(projectId, computeRegion);

// A resource that represents Google Cloud Platform location.
LocationName projectLocation = LocationName.of(projectId, computeRegion);
// Classification type assigned based on multilabel value.
ClassificationType classificationType =
multiLabel ? ClassificationType.MULTILABEL : ClassificationType.MULTICLASS;

// Build the List datasets request
ListDatasetsRequest request =
ListDatasetsRequest.newBuilder()
.setParent(projectLocation.toString())
.setFilter(filter)
.build();
// Specify the text classification type for the dataset.
TextClassificationDatasetMetadata textClassificationDatasetMetadata =
TextClassificationDatasetMetadata.newBuilder()
.setClassificationType(classificationType)
.build();

// Set dataset name and dataset metadata.
Dataset myDataset =
Dataset.newBuilder()
.setDisplayName(datasetName)
.setTextClassificationDatasetMetadata(textClassificationDatasetMetadata)
.build();

// Create a dataset with the dataset metadata in the region.
Dataset dataset = client.createDataset(projectLocation, myDataset);

// List all the datasets available in the region by applying filter.
System.out.println("List of datasets:");
for (Dataset dataset : client.listDatasets(request).iterateAll()) {
// Display the dataset information.
System.out.println(String.format("\nDataset name: %s", dataset.getName()));
System.out.println(String.format("Dataset name: %s", dataset.getName()));
System.out.println(
String.format(
"Dataset id: %s",
Expand All @@ -145,6 +103,51 @@ public static void listDatasets(String projectId, String computeRegion, String f
System.out.println(String.format("\tnanos: %s", dataset.getCreateTime().getNanos()));
}
}
// [END automl_language_create_dataset]

// [START automl_language_list_datasets]
/**
 * Demonstrates using the AutoML client to list all datasets.
 *
 * @param projectId the Id of the project.
 * @param computeRegion the Region name.
 * @param filter the Filter expression.
 * @throws IOException on Input/Output errors.
 */
public static void listDatasets(String projectId, String computeRegion, String filter)
    throws IOException {
  // Open the client in a try-with-resources block so it is always closed.
  try (AutoMlClient client = AutoMlClient.create()) {

    // The Google Cloud Platform location whose datasets we enumerate.
    LocationName parent = LocationName.of(projectId, computeRegion);

    // Assemble the list-datasets request with the caller-supplied filter.
    ListDatasetsRequest listRequest =
        ListDatasetsRequest.newBuilder()
            .setParent(parent.toString())
            .setFilter(filter)
            .build();

    // Walk every page of results and print each dataset's details.
    System.out.println("List of datasets:");
    for (Dataset ds : client.listDatasets(listRequest).iterateAll()) {
      String name = ds.getName();
      String[] segments = name.split("/");
      System.out.println(String.format("\nDataset name: %s", name));
      // The dataset id is the final path segment of the resource name.
      System.out.println(String.format("Dataset id: %s", segments[segments.length - 1]));
      System.out.println(String.format("Dataset display name: %s", ds.getDisplayName()));
      System.out.println("Text classification dataset metadata:");
      System.out.print(String.format("\t%s", ds.getTextClassificationDatasetMetadata()));
      System.out.println(String.format("Dataset example count: %d", ds.getExampleCount()));
      System.out.println("Dataset create time:");
      System.out.println(String.format("\tseconds: %s", ds.getCreateTime().getSeconds()));
      System.out.println(String.format("\tnanos: %s", ds.getCreateTime().getNanos()));
    }
  }
}
// [END automl_language_list_datasets]

// [START automl_language_get_dataset]
Expand All @@ -159,27 +162,28 @@ public static void listDatasets(String projectId, String computeRegion, String f
public static void getDataset(String projectId, String computeRegion, String datasetId)
    throws IOException {
  // Instantiates a client; try-with-resources ensures the client is closed
  // and its transport channels released when the method exits.
  try (AutoMlClient client = AutoMlClient.create()) {

    // Get the complete path of the dataset.
    DatasetName datasetFullId = DatasetName.of(projectId, computeRegion, datasetId);

    // Get all the information about a given dataset.
    Dataset dataset = client.getDataset(datasetFullId);

    // Display the dataset information.
    System.out.println(String.format("Dataset name: %s", dataset.getName()));
    System.out.println(
        String.format(
            "Dataset id: %s",
            dataset.getName().split("/")[dataset.getName().split("/").length - 1]));
    System.out.println(String.format("Dataset display name: %s", dataset.getDisplayName()));
    System.out.println("Text classification dataset metadata:");
    System.out.print(String.format("\t%s", dataset.getTextClassificationDatasetMetadata()));
    System.out.println(String.format("Dataset example count: %d", dataset.getExampleCount()));
    System.out.println("Dataset create time:");
    System.out.println(String.format("\tseconds: %s", dataset.getCreateTime().getSeconds()));
    System.out.println(String.format("\tnanos: %s", dataset.getCreateTime().getNanos()));
  }
}
// [END automl_language_get_dataset]

Expand All @@ -197,25 +201,22 @@ public static void getDataset(String projectId, String computeRegion, String dat
public static void importData(
    String projectId, String computeRegion, String datasetId, String path) throws Exception {
  // Instantiates a client; try-with-resources releases gRPC resources on exit.
  try (AutoMlClient client = AutoMlClient.create()) {

    // Get the complete path of the dataset.
    DatasetName datasetFullId = DatasetName.of(projectId, computeRegion, datasetId);

    // Get multiple training data files to be imported: `path` is a
    // comma-separated list of GCS URIs, added to the source in one call.
    GcsSource gcsSource =
        GcsSource.newBuilder().addAllInputUris(Arrays.asList(path.split(","))).build();

    // Import data from the input URI.
    InputConfig inputConfig = InputConfig.newBuilder().setGcsSource(gcsSource).build();
    System.out.println("Processing import...");

    // Blocks until the long-running import operation completes.
    Empty response = client.importDataAsync(datasetFullId, inputConfig).get();
    System.out.println(String.format("Dataset imported. %s", response));
  }
}
// [END automl_language_import_data]

Expand All @@ -232,20 +233,23 @@ public static void importData(
public static void exportData(
    String projectId, String computeRegion, String datasetId, String gcsUri) throws Exception {
  // Instantiates a client; try-with-resources guarantees cleanup.
  try (AutoMlClient client = AutoMlClient.create()) {

    // Get the complete path of the dataset.
    DatasetName datasetFullId = DatasetName.of(projectId, computeRegion, datasetId);

    // Set the output URI.
    GcsDestination gcsDestination =
        GcsDestination.newBuilder().setOutputUriPrefix(gcsUri).build();

    // Export the data to the output URI.
    OutputConfig outputConfig =
        OutputConfig.newBuilder().setGcsDestination(gcsDestination).build();
    System.out.println(String.format("Processing export..."));

    // Blocks until the long-running export operation completes.
    Empty response = client.exportDataAsync(datasetFullId, outputConfig).get();
    System.out.println(String.format("Dataset exported. %s", response));
  }
}
// [END automl_language_export_data]

Expand All @@ -261,15 +265,16 @@ public static void exportData(
public static void deleteDataset(String projectId, String computeRegion, String datasetId)
    throws Exception {
  // Instantiates a client; try-with-resources ensures the client is closed.
  try (AutoMlClient client = AutoMlClient.create()) {

    // Get the complete path of the dataset.
    DatasetName datasetFullId = DatasetName.of(projectId, computeRegion, datasetId);

    // Delete a dataset; blocks until the long-running operation completes.
    Empty response = client.deleteDatasetAsync(datasetFullId).get();

    System.out.println(String.format("Dataset deleted. %s", response));
  }
}
// [END automl_language_delete_dataset]

Expand Down
Loading

0 comments on commit 799368d

Please sign in to comment.