diff --git a/test-apps/README.md b/test-apps/README.md
index c177ac8..f9e265b 100644
--- a/test-apps/README.md
+++ b/test-apps/README.md
@@ -1,42 +1,50 @@
 gcloud-java Test Applications
 =============================
-This folder contains examples of end-to-end test apps for gcloud-java.
+This folder contains examples of test apps for gcloud-java.
 
 In order to run these tests, you will need to:
 
-* Create a [Google Developers Console](https://console.developers.google.com/) project with the BigQuery, Datastore, and Storage JSON API enabled.
-* [Enable billing](https://support.google.com/cloud/answer/6158867?hl=en). [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up.
+* Create a [Google Developers Console](https://console.developers.google.com/) project with the BigQuery, Datastore, and Storage JSON APIs enabled. [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up.
+* [Enable billing](https://support.google.com/cloud/answer/6158867?hl=en).
 * Set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands on the command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`.
 
 To run the App Engine test app:
-1. Change "your-app-id" to your project ID in `src/main/webapp/WEB-INF/appengine-web.xml` and in `src/main/java/com/google/gcloud/tests/gcloud-appengine-test/MainServlet.java`.
-2. Create JSON service account credentials and save it as follows: `src/main/webapp/WEB-INF/lib/my-service-account-credentials.json`.
+
+1. Change "your-project-id" to your project ID in `src/main/webapp/WEB-INF/appengine-web.xml` and in `src/main/java/com/google/gcloud/tests/gcloud-appengine-test/MainServlet.java`.
+2. Create JSON service account credentials and save the file as follows: `src/main/webapp/WEB-INF/lib/my-service-account-credentials.json`.
 3. Run `mvn appengine:devserver` to test locally and `mvn appengine:update` to test the app in production.
+4. Navigate to the URL where the app is deployed. Add the service you wish to test to the URL ("bigquery", "datastore", "resourcemanager", or "storage"). You can also set optional request parameters for the project ID and whether to use the JSON credentials file you included in `WEB-INF/lib`. Here's an example URL using datastore with project ID "my-project-id" that uses the JSON credentials file:
+
+   ```
+   http://localhost:8080/datastore?project-id=my-project-id&credentials-file=true
+   ```
 
-To run the Compute Engine test app:
+To run the command line test app on Compute Engine:
 
-1. Change "your-app-id" to your project ID in `src/main/java/com/google/gcloud/tests/gcloud-java-compute-test/GcloudJavaComputeEngineTest.java`.
+1. Change "your-project-id" to your project ID in `src/main/java/com/google/gcloud/tests/gcloud-java-command-line-test/GcloudJavaCommandLineTest.java`.
 2. Create JSON service account credentials and save the file in the application's base directory as `my-service-account-credentials.json`.
 3. Create a Compute Engine instance with the User Info, Big Query, Datastore, and Storage APIs (read write) enabled.
 4. Copy the app to your instance using the Google Cloud SDK command:
+
    ```
-   gcloud compute copy-files [the app's base directory] [username@instance-name]:~
+   gcloud compute copy-files test-apps/command-line-test-app [username@instance-name]:~
    ```
 5. Ensure that Maven and the Java 7 JDK are installed.
Also check that the environment variable `JAVA_HOME` points to the Java 7 JDK. -6. Run the app using the command +6. Ensure that you have User Credentials available on your Compute Engine instance. These credentials are necessary to use Resource Manager. If you get authentication errors stemming from inadequate authentication scope in step 7, you can copy your gcloud SDK credentials (usually located in the `~/.config/gcloud` directory) to your Compute Engine instance and set the GOOGLE_APPLICATION_CREDENTIALS environment variable as follows: `export GOOGLE_APPLICATION_CREDENTIALS=`. +7. Run the app using Maven's exec plugin. Specify the service and optional parameters for explicitly setting a project ID and using your JSON credentials file as command line parameters. Here is an example of running the storage test, including both the optional parameters. + ``` - mvn clean compile exec:java -Dexec.mainClass="com.google.gcloud.tests.desktop.GcloudJavaComputeEngineTest" + mvn clean compile exec:java \ + -Dexec.mainClass="com.google.gcloud.tests.commandline.GcloudJavaCommandLineTest" \ + -Dexec.args="storage project-id=my-project-id credentials-file=true" ``` -To run the desktop test app: +To run the command line test app on your desktop: -1. Change "your-app-id" to your project ID in `src/main/java/com/google/gcloud/tests/gcloud-java-desktop-test/GcloudJavaDesktopTest.java`. +1. Change "your-app-id" to your project ID in `src/main/java/com/google/gcloud/tests/gcloud-java-command-line-test/GcloudJavaCommandLineTest.java`. 2. Create JSON service account credentials and save the file in application's base directory as `my-service-account-credentials.json`. -3. Run the app using the command - ``` - mvn clean compile exec:java -Dexec.mainClass="com.google.gcloud.tests.desktop.GcloudJavaDesktopTest" - ``` +3. Run the app using the same command line structure as for Compute Engine (see step 7 under Compute Engine). License ------- diff --git a/test-apps/app-engine-test-app/pom.xml b/test-apps/app-engine-test-app/pom.xml index 3bc2726..4e721ac 100644 --- a/test-apps/app-engine-test-app/pom.xml +++ b/test-apps/app-engine-test-app/pom.xml @@ -76,14 +76,20 @@ org.codehaus.mojo - versions-maven-plugin - 2.1 + build-helper-maven-plugin + 1.7 - compile + add-source + generate-sources - display-dependency-updates + add-source + + + ${basedir}/../shared/src/main/java/com/google/gcloud/tests + + diff --git a/test-apps/app-engine-test-app/src/main/java/com/google/gcloud/tests/appengine/MainServlet.java b/test-apps/app-engine-test-app/src/main/java/com/google/gcloud/tests/appengine/MainServlet.java index 3b21718..0e420df 100644 --- a/test-apps/app-engine-test-app/src/main/java/com/google/gcloud/tests/appengine/MainServlet.java +++ b/test-apps/app-engine-test-app/src/main/java/com/google/gcloud/tests/appengine/MainServlet.java @@ -1,36 +1,30 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package com.google.gcloud.tests.appengine; -import static java.nio.charset.StandardCharsets.UTF_8; +import static com.google.gcloud.tests.ServiceTests.SUPPORTED_SERVICES; +import static com.google.gcloud.tests.ServiceTests.runAction; import com.google.gcloud.AuthCredentials; -import com.google.gcloud.bigquery.BaseTableInfo; -import com.google.gcloud.bigquery.BigQuery; -import com.google.gcloud.bigquery.BigQueryOptions; -import com.google.gcloud.bigquery.DatasetInfo; -import com.google.gcloud.bigquery.Field; -import com.google.gcloud.bigquery.Schema; -import com.google.gcloud.bigquery.TableId; -import com.google.gcloud.bigquery.TableInfo; -import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; -import com.google.gcloud.datastore.Datastore; -import com.google.gcloud.datastore.DatastoreOptions; -import com.google.gcloud.datastore.DateTime; -import com.google.gcloud.datastore.Entity; -import com.google.gcloud.datastore.Key; -import com.google.gcloud.datastore.KeyFactory; -import com.google.gcloud.storage.Blob; -import com.google.gcloud.storage.BlobId; -import com.google.gcloud.storage.BlobInfo; -import com.google.gcloud.storage.BucketInfo; -import com.google.gcloud.storage.Storage; -import com.google.gcloud.storage.StorageOptions; -import com.google.gcloud.storage.testing.RemoteGcsHelper; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; +import java.util.Map; import javax.servlet.ServletContext; import javax.servlet.http.HttpServlet; @@ -39,83 +33,38 @@ public class MainServlet extends HttpServlet { - private PrintWriter responseWriter; - @Override public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { - responseWriter = resp.getWriter(); - runBigQueryActions(BigQueryOptions.defaultInstance().service()); - ServletContext context = getServletContext(); - InputStream in = - context.getResourceAsStream("/WEB-INF/lib/my-service-account-credentials.json"); - runDatastoreActions( - DatastoreOptions.builder() - .authCredentials(AuthCredentials.createForJson(in)) - .projectId("your-project-id") - .build() - .service()); - runStorageActions(StorageOptions.defaultInstance().service()); - } - - private void runBigQueryActions(BigQuery bigquery) { - responseWriter.println("Testing BigQuery."); - String datasetName = RemoteBigQueryHelper.generateDatasetName(); - bigquery.create(DatasetInfo.builder(datasetName).build()); - TableId tableId = TableId.of(datasetName, "my_table"); - BaseTableInfo info = bigquery.getTable(tableId); - if (info == null) { - responseWriter.println("Creating table " + tableId); - Field integerField = Field.of("fieldName", Field.Type.integer()); - bigquery.create(TableInfo.of(tableId, Schema.of(integerField))); - } - RemoteBigQueryHelper.forceDelete(bigquery, datasetName); - responseWriter.println("Finished BigQuery test."); - } - - private void runDatastoreActions(Datastore datastore) { - responseWriter.println("Testing Datastore."); - KeyFactory keyFactory = datastore.newKeyFactory().kind("Person"); - Key key = keyFactory.newKey("myid"); - Entity entity = datastore.get(key); - if (entity == null) { - entity = Entity.builder(key) - .set("name", "John Doe") - .set("age", 30) - .set("access_time", DateTime.now()) - .build(); - datastore.put(entity); + PrintWriter responseWriter = resp.getWriter(); + String projectId = null; + AuthCredentials credentials = null; + String[] pathInfo = req.getPathInfo().split("/"); + if 
(pathInfo.length != 2) { + printHelpMessage(responseWriter); } else { - responseWriter.println("Updating access_time for " + entity.getString("name")); - entity = Entity.builder(entity) - .set("access_time", DateTime.now()) - .build(); - datastore.update(entity); + Map params = req.getParameterMap(); + projectId = + params.get("project-id") != null ? ((String[]) params.get("project-id"))[0] : null; + Boolean useCredentialsFile = + params.get("credentials-file") != null + ? Boolean.parseBoolean(((String[]) params.get("credentials-file"))[0]) : null; + if (useCredentialsFile != null) { + ServletContext context = getServletContext(); + InputStream in = + context.getResourceAsStream("/WEB-INF/lib/my-service-account-credentials.json"); + credentials = AuthCredentials.createForJson(in); + } + runAction(pathInfo[1], responseWriter, projectId, credentials); } - responseWriter.println("Finished Datastore test."); } - private void runStorageActions(Storage storage) { - responseWriter.println("Testing Storage."); - String bucketName = RemoteGcsHelper.generateBucketName(); - storage.create(BucketInfo.of(bucketName)); - BlobId blobId = BlobId.of(bucketName, "my_blob"); - Blob blob = Blob.load(storage, blobId); - if (blob == null) { - BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); - storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); - responseWriter.println("Writing a file to Storage."); - } else { - responseWriter.println("Updating content for " + blobId.name()); - byte[] prevContent = blob.content(); - responseWriter.println(new String(prevContent, UTF_8)); - WritableByteChannel channel = blob.writer(); - try { - channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); - channel.close(); - } catch (IOException e) { - responseWriter.println(e.toString()); - } - } - responseWriter.println("Finished Storage test."); + private static void printHelpMessage(PrintWriter pw) { + pw.println( + "Specify the service you wish to test in the URL (i.e. http://localhost:8080/datastore). " + + "The following services are supported: " + SUPPORTED_SERVICES.toString() + ". " + + "To explicitly set a project ID, specify the request parameter project-id=my-project-id. " + + "To use a service account credentials file, specify the request parameter " + + "'credentials-file=true'. 
An example of setting both parameters: " + + "http://localhost:8080/datastore?project-id=my-project-id&credential-file=true"); } } diff --git a/test-apps/app-engine-test-app/src/main/webapp/WEB-INF/web.xml b/test-apps/app-engine-test-app/src/main/webapp/WEB-INF/web.xml index 106c8d1..73b437e 100644 --- a/test-apps/app-engine-test-app/src/main/webapp/WEB-INF/web.xml +++ b/test-apps/app-engine-test-app/src/main/webapp/WEB-INF/web.xml @@ -13,6 +13,6 @@ MainServlet - / + /* diff --git a/test-apps/compute-engine-test-app/pom.xml b/test-apps/command-line-test-app/pom.xml similarity index 72% rename from test-apps/compute-engine-test-app/pom.xml rename to test-apps/command-line-test-app/pom.xml index 383ecdc..c3c24c1 100644 --- a/test-apps/compute-engine-test-app/pom.xml +++ b/test-apps/command-line-test-app/pom.xml @@ -4,7 +4,7 @@ 4.0.0 jar com.google.gcloud.tests - gcloud-java-compute-test + gcloud-java-command-line-test 1 3.1.0 @@ -54,6 +54,25 @@ versions-maven-plugin 2.1 + + org.codehaus.mojo + build-helper-maven-plugin + 1.7 + + + add-source + generate-sources + + add-source + + + + ${basedir}/../shared/src/main/java/com/google/gcloud/tests + + + + + diff --git a/test-apps/command-line-test-app/src/main/java/com/google/gcloud/tests/commandline/GcloudJavaCommandLineTest.java b/test-apps/command-line-test-app/src/main/java/com/google/gcloud/tests/commandline/GcloudJavaCommandLineTest.java new file mode 100644 index 0000000..367cbd3 --- /dev/null +++ b/test-apps/command-line-test-app/src/main/java/com/google/gcloud/tests/commandline/GcloudJavaCommandLineTest.java @@ -0,0 +1,57 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.tests.commandline; + +import static com.google.gcloud.tests.ServiceTests.SUPPORTED_SERVICES; +import static com.google.gcloud.tests.ServiceTests.runAction; + +import com.google.gcloud.AuthCredentials; + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.Arrays; + +public class GcloudJavaCommandLineTest { + public static void main(String[] args) throws IOException { + PrintWriter pw = new PrintWriter(System.out, true); + String projectId = null; + AuthCredentials credentials = null; + if (args.length == 0) { + pw.println("Must specify a service to test as the first command line argument. Options are: " + + SUPPORTED_SERVICES.toString()); + } else { + for (String arg : Arrays.copyOfRange(args, 1, args.length)) { + String[] argInfo = arg.split("="); + switch (argInfo[0]) { + case "project-id": + projectId = argInfo.length > 1 ? argInfo[1] : null; + break; + case "credentials-file": + credentials = AuthCredentials.createForJson( + new FileInputStream("my-service-account-credentials.json")); + break; + default: + pw.println("Unrecognized optional argument " + arg + ". 
Acceptable optional " + + "arguments are 'project-id=[your-project-id]' and " + + "'credentials-file=[true|false]'"); + } + } + runAction(args[0], pw, projectId, credentials); + } + } +} diff --git a/test-apps/compute-engine-test-app/src/main/java/com/google/gcloud/tests/compute/GcloudJavaComputeEngineTest.java b/test-apps/compute-engine-test-app/src/main/java/com/google/gcloud/tests/compute/GcloudJavaComputeEngineTest.java deleted file mode 100644 index 0079319..0000000 --- a/test-apps/compute-engine-test-app/src/main/java/com/google/gcloud/tests/compute/GcloudJavaComputeEngineTest.java +++ /dev/null @@ -1,110 +0,0 @@ -package com.google.gcloud.tests.compute; - -import static java.nio.charset.StandardCharsets.UTF_8; - -import com.google.gcloud.AuthCredentials; -import com.google.gcloud.bigquery.BaseTableInfo; -import com.google.gcloud.bigquery.BigQuery; -import com.google.gcloud.bigquery.BigQueryOptions; -import com.google.gcloud.bigquery.DatasetInfo; -import com.google.gcloud.bigquery.Field; -import com.google.gcloud.bigquery.Schema; -import com.google.gcloud.bigquery.TableId; -import com.google.gcloud.bigquery.TableInfo; -import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; -import com.google.gcloud.datastore.Datastore; -import com.google.gcloud.datastore.DatastoreOptions; -import com.google.gcloud.datastore.DateTime; -import com.google.gcloud.datastore.Entity; -import com.google.gcloud.datastore.Key; -import com.google.gcloud.datastore.KeyFactory; -import com.google.gcloud.storage.Blob; -import com.google.gcloud.storage.BlobId; -import com.google.gcloud.storage.BlobInfo; -import com.google.gcloud.storage.BucketInfo; -import com.google.gcloud.storage.Storage; -import com.google.gcloud.storage.StorageOptions; -import com.google.gcloud.storage.testing.RemoteGcsHelper; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; - -public class GcloudJavaComputeEngineTest { - - public static void main(String[] args) throws FileNotFoundException, IOException { - runBigQueryActions(BigQueryOptions.defaultInstance().service()); - runDatastoreActions( - DatastoreOptions.builder() - .authCredentials(AuthCredentials.createForJson( - new FileInputStream("my-service-account-credentials.json"))) - .projectId("your-project-id") - .build() - .service()); - runStorageActions(StorageOptions.defaultInstance().service()); - } - - private static void runBigQueryActions(BigQuery bigquery) { - System.out.println("Testing BigQuery."); - String datasetName = RemoteBigQueryHelper.generateDatasetName(); - bigquery.create(DatasetInfo.builder(datasetName).build()); - TableId tableId = TableId.of(datasetName, "my_table"); - BaseTableInfo info = bigquery.getTable(tableId); - if (info == null) { - System.out.println("Creating table " + tableId); - Field integerField = Field.of("fieldName", Field.Type.integer()); - bigquery.create(TableInfo.of(tableId, Schema.of(integerField))); - } - RemoteBigQueryHelper.forceDelete(bigquery, datasetName); - System.out.println("Finished BigQuery test."); - } - - private static void runDatastoreActions(Datastore datastore) { - System.out.println("Testing Datastore."); - KeyFactory keyFactory = datastore.newKeyFactory().kind("Person"); - Key key = keyFactory.newKey("myid"); - Entity entity = datastore.get(key); - if (entity == null) { - entity = Entity.builder(key) - .set("name", "John Doe") - .set("age", 30) - .set("access_time", DateTime.now()) - .build(); 
- datastore.put(entity); - } else { - System.out.println("Updating access_time for " + entity.getString("name")); - entity = Entity.builder(entity) - .set("access_time", DateTime.now()) - .build(); - datastore.update(entity); - } - System.out.println("Finished Datastore test."); - } - - private static void runStorageActions(Storage storage) { - System.out.println("Testing Storage."); - String bucketName = RemoteGcsHelper.generateBucketName(); - storage.create(BucketInfo.of(bucketName)); - BlobId blobId = BlobId.of(bucketName, "my_blob"); - Blob blob = Blob.load(storage, blobId); - if (blob == null) { - BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); - storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); - System.out.println("Writing a file to Storage."); - } else { - System.out.println("Updating content for " + blobId.name()); - byte[] prevContent = blob.content(); - System.out.println(new String(prevContent, UTF_8)); - WritableByteChannel channel = blob.writer(); - try { - channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); - channel.close(); - } catch (IOException e) { - System.out.println(e.toString()); - } - } - System.out.println("Finished Storage test."); - } -} diff --git a/test-apps/desktop-test-app/pom.xml b/test-apps/desktop-test-app/pom.xml deleted file mode 100644 index 3251bb1..0000000 --- a/test-apps/desktop-test-app/pom.xml +++ /dev/null @@ -1,59 +0,0 @@ - - - 4.0.0 - jar - com.google.gcloud.tests - gcloud-java-desktop-test - 1 - - 3.1.0 - - - - com.google.gcloud - gcloud-java - 0.1.1 - - - - - - org.codehaus.mojo - exec-maven-plugin - 1.4.0 - - - - exec - - - - - maven - - - - maven-compiler-plugin - 3.1 - - 1.7 - 1.7 - UTF-8 - - - - org.codehaus.mojo - exec-maven-plugin - - false - - - - org.codehaus.mojo - versions-maven-plugin - 2.1 - - - - diff --git a/test-apps/desktop-test-app/src/main/java/com/google/gcloud/tests/desktop/GcloudJavaDesktopTest.java b/test-apps/desktop-test-app/src/main/java/com/google/gcloud/tests/desktop/GcloudJavaDesktopTest.java deleted file mode 100644 index 2bf7d19..0000000 --- a/test-apps/desktop-test-app/src/main/java/com/google/gcloud/tests/desktop/GcloudJavaDesktopTest.java +++ /dev/null @@ -1,125 +0,0 @@ -package com.google.gcloud.tests.desktop; - -import static java.nio.charset.StandardCharsets.UTF_8; - -import com.google.gcloud.AuthCredentials; -import com.google.gcloud.bigquery.BaseTableInfo; -import com.google.gcloud.bigquery.BigQuery; -import com.google.gcloud.bigquery.BigQueryOptions; -import com.google.gcloud.bigquery.DatasetInfo; -import com.google.gcloud.bigquery.Field; -import com.google.gcloud.bigquery.Schema; -import com.google.gcloud.bigquery.TableId; -import com.google.gcloud.bigquery.TableInfo; -import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; -import com.google.gcloud.datastore.Datastore; -import com.google.gcloud.datastore.DatastoreOptions; -import com.google.gcloud.datastore.DateTime; -import com.google.gcloud.datastore.Entity; -import com.google.gcloud.datastore.Key; -import com.google.gcloud.datastore.KeyFactory; -import com.google.gcloud.resourcemanager.ProjectInfo; -import com.google.gcloud.resourcemanager.ResourceManager; -import com.google.gcloud.resourcemanager.ResourceManagerOptions; -import com.google.gcloud.storage.Blob; -import com.google.gcloud.storage.BlobId; -import com.google.gcloud.storage.BlobInfo; -import com.google.gcloud.storage.BucketInfo; -import com.google.gcloud.storage.Storage; -import 
com.google.gcloud.storage.StorageOptions; -import com.google.gcloud.storage.testing.RemoteGcsHelper; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; -import java.util.Iterator; - -public class GcloudJavaDesktopTest { - - public static void main(String[] args) throws FileNotFoundException, IOException { - runBigQueryActions(BigQueryOptions.defaultInstance().service()); - runDatastoreActions( - DatastoreOptions.builder() - .authCredentials(AuthCredentials.createForJson( - new FileInputStream("my-service-account-credentials.json"))) - .projectId("your-app-id") - .build() - .service()); - runResourceManagerActions(ResourceManagerOptions.defaultInstance().service()); - runStorageActions(StorageOptions.defaultInstance().service()); - } - - private static void runBigQueryActions(BigQuery bigquery) { - System.out.println("Testing BigQuery."); - String datasetName = RemoteBigQueryHelper.generateDatasetName(); - bigquery.create(DatasetInfo.builder(datasetName).build()); - TableId tableId = TableId.of(datasetName, "my_table"); - BaseTableInfo info = bigquery.getTable(tableId); - if (info == null) { - System.out.println("Creating table " + tableId); - Field integerField = Field.of("fieldName", Field.Type.integer()); - bigquery.create(TableInfo.of(tableId, Schema.of(integerField))); - } - RemoteBigQueryHelper.forceDelete(bigquery, datasetName); - System.out.println("Finished BigQuery test."); - } - - private static void runDatastoreActions(Datastore datastore) { - System.out.println("Testing Datastore."); - KeyFactory keyFactory = datastore.newKeyFactory().kind("Person"); - Key key = keyFactory.newKey("myid"); - Entity entity = datastore.get(key); - if (entity == null) { - entity = Entity.builder(key) - .set("name", "John Doe") - .set("age", 30) - .set("access_time", DateTime.now()) - .build(); - datastore.put(entity); - } else { - System.out.println("Updating access_time for " + entity.getString("name")); - entity = Entity.builder(entity) - .set("access_time", DateTime.now()) - .build(); - datastore.update(entity); - } - System.out.println("Finished Datastore test."); - } - - private static void runResourceManagerActions(ResourceManager resourceManager) { - System.out.println("Testing Resource Manager."); - Iterator projectIterator = resourceManager.list().iterateAll(); - System.out.println("Projects I can view:"); - while (projectIterator.hasNext()) { - System.out.println(projectIterator.next().projectId()); - } - System.out.println("Finished Resource Manager test."); - } - - private static void runStorageActions(Storage storage) { - System.out.println("Testing Storage."); - String bucketName = RemoteGcsHelper.generateBucketName(); - storage.create(BucketInfo.of(bucketName)); - BlobId blobId = BlobId.of(bucketName, "my_blob"); - Blob blob = Blob.load(storage, blobId); - if (blob == null) { - BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); - storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); - System.out.println("Writing a file to Storage."); - } else { - System.out.println("Updating content for " + blobId.name()); - byte[] prevContent = blob.content(); - System.out.println(new String(prevContent, UTF_8)); - WritableByteChannel channel = blob.writer(); - try { - channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); - channel.close(); - } catch (IOException e) { - System.out.println(e.toString()); - } - } - 
System.out.println("Finished Storage test."); - } -} diff --git a/test-apps/shared/src/main/java/com/google/gcloud/tests/ServiceTests.java b/test-apps/shared/src/main/java/com/google/gcloud/tests/ServiceTests.java new file mode 100644 index 0000000..269a285 --- /dev/null +++ b/test-apps/shared/src/main/java/com/google/gcloud/tests/ServiceTests.java @@ -0,0 +1,178 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.tests; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.bigquery.BaseTableInfo; +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.bigquery.DatasetInfo; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; +import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.gcloud.datastore.Datastore; +import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.datastore.DateTime; +import com.google.gcloud.datastore.Entity; +import com.google.gcloud.datastore.Key; +import com.google.gcloud.datastore.KeyFactory; +import com.google.gcloud.resourcemanager.ProjectInfo; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; +import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; +import com.google.gcloud.storage.testing.RemoteGcsHelper; + +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; +import java.util.Iterator; +import java.util.Set; + +public class ServiceTests { + + public static final Set SUPPORTED_SERVICES = + ImmutableSet.of("bigquery", "datastore", "resourcemanager", "storage"); + + private static void runBigQueryActions(BigQuery bigquery, PrintWriter pw) { + pw.println("Testing BigQuery."); + String datasetName = RemoteBigQueryHelper.generateDatasetName(); + bigquery.create(DatasetInfo.builder(datasetName).build()); + TableId tableId = TableId.of(datasetName, "my_table"); + BaseTableInfo info = bigquery.getTable(tableId); + if (info == null) { + pw.println("Creating table " + tableId); + Field integerField = Field.of("fieldName", Field.Type.integer()); + bigquery.create(TableInfo.of(tableId, Schema.of(integerField))); + } + RemoteBigQueryHelper.forceDelete(bigquery, datasetName); + pw.println("Finished BigQuery test."); + } + + private static void runDatastoreActions(Datastore datastore, PrintWriter pw) { + pw.println("Testing Datastore."); + KeyFactory keyFactory = 
datastore.newKeyFactory().kind("Person"); + Key key = keyFactory.newKey("myid"); + Entity entity = datastore.get(key); + if (entity == null) { + pw.println("Creating entity"); + entity = Entity.builder(key) + .set("name", "John Doe") + .set("age", 30) + .set("access_time", DateTime.now()) + .build(); + datastore.put(entity); + } else { + pw.println("Updating access_time for " + entity.getString("name")); + entity = Entity.builder(entity) + .set("access_time", DateTime.now()) + .build(); + datastore.update(entity); + } + pw.println("Finished Datastore test."); + } + + private static void runResourceManagerActions(ResourceManager resourceManager, PrintWriter pw) { + pw.println("Testing Resource Manager."); + Iterator projectIterator = resourceManager.list().iterateAll(); + pw.println("Projects I can view:"); + while (projectIterator.hasNext()) { + pw.println(projectIterator.next().projectId()); + } + pw.println("Finished Resource Manager test."); + } + + private static void runStorageActions(Storage storage, PrintWriter pw) { + pw.println("Testing Storage."); + String bucketName = RemoteGcsHelper.generateBucketName(); + storage.create(BucketInfo.of(bucketName)); + BlobId blobId = BlobId.of(bucketName, "my_blob"); + Blob blob = Blob.load(storage, blobId); + if (blob == null) { + BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); + storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); + pw.println("Writing a file to Storage."); + } else { + pw.println("Updating content for " + blobId.name()); + byte[] prevContent = blob.content(); + pw.println(new String(prevContent, UTF_8)); + WritableByteChannel channel = blob.writer(); + try { + channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); + channel.close(); + } catch (IOException e) { + pw.println(e.toString()); + } + } + pw.println("Finished Storage test."); + } + + public static void runAction( + String service, PrintWriter pw, String projectId, AuthCredentials credentials) { + switch (service.toLowerCase()) { + case "bigquery": + runBigQueryActions( + BigQueryOptions.builder() + .projectId(projectId) + .authCredentials(credentials) + .build() + .service(), + pw); + break; + case "datastore": + runDatastoreActions( + DatastoreOptions.builder() + .projectId(projectId) + .authCredentials(credentials) + .build() + .service(), + pw); + break; + case "resourcemanager": + runResourceManagerActions( + ResourceManagerOptions.builder() + .projectId(projectId) + .authCredentials(credentials) + .build() + .service(), + pw); + break; + case "storage": + runStorageActions( + StorageOptions.builder() + .projectId(projectId) + .authCredentials(credentials) + .build() + .service(), + pw); + break; + default: + pw.println("The service argument " + service + + " is not included in the set of supported services " + SUPPORTED_SERVICES.toString()); + } + } +}
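
The shared `ServiceTests` class introduced above is what both front ends delegate to, so any additional harness only needs to call `runAction`. Below is a minimal sketch of that kind of reuse, assuming `SUPPORTED_SERVICES` is declared as a `Set<String>`; the class name `RunAllServicesSketch` is hypothetical, and passing `null` for the project ID and credentials simply mirrors what the command-line app does when no optional arguments are given, so the gcloud-java defaults apply.

```java
package com.google.gcloud.tests.commandline;

import static com.google.gcloud.tests.ServiceTests.SUPPORTED_SERVICES;
import static com.google.gcloud.tests.ServiceTests.runAction;

import java.io.PrintWriter;

// Hypothetical driver that exercises every supported service in one run by
// reusing the shared ServiceTests API added in this change.
public class RunAllServicesSketch {

  public static void main(String[] args) {
    PrintWriter pw = new PrintWriter(System.out, true);
    for (String service : SUPPORTED_SERVICES) {
      // A null project ID and null credentials mean the library defaults are used,
      // just like running the command-line app with no optional arguments.
      runAction(service, pw, null, null);
    }
  }
}
```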
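
For the App Engine app, step 4 of the README amounts to issuing a GET against the deployed servlet. If it is useful to script that check, here is a small sketch using only the JDK's `HttpURLConnection`; the localhost URL and project ID are the example values from the README, and the class name is hypothetical.

```java
package com.google.gcloud.tests.appengine;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Hypothetical smoke check: request one service test from the deployed servlet
// and echo its output (e.g. "Testing Datastore." ... "Finished Datastore test.").
public class ServletSmokeCheckSketch {

  public static void main(String[] args) throws IOException {
    URL url = new URL(
        "http://localhost:8080/datastore?project-id=my-project-id&credentials-file=true");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    } finally {
      connection.disconnect();
    }
  }
}
```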