diff --git a/dataproc/snippets/src/main/java/Quickstart.java b/dataproc/snippets/src/main/java/Quickstart.java
index 698c49db82e..0f54c5f8c81 100644
--- a/dataproc/snippets/src/main/java/Quickstart.java
+++ b/dataproc/snippets/src/main/java/Quickstart.java
@@ -55,27 +55,6 @@
 
 public class Quickstart {
 
-  public static Job waitForJobCompletion(
-      JobControllerClient jobControllerClient, String projectId, String region, String jobId) {
-    while (true) {
-      // Poll the service periodically until the Job is in a finished state.
-      Job jobInfo = jobControllerClient.getJob(projectId, region, jobId);
-      switch (jobInfo.getStatus().getState()) {
-        case DONE:
-        case CANCELLED:
-        case ERROR:
-          return jobInfo;
-        default:
-          try {
-            // Wait a second in between polling attempts.
-            TimeUnit.SECONDS.sleep(1);
-          } catch (InterruptedException e) {
-            throw new RuntimeException(e);
-          }
-      }
-    }
-  }
-
   public static void quickstart(
       String projectId, String region, String clusterName, String jobFilePath)
       throws IOException, InterruptedException {
@@ -130,16 +109,8 @@ public static void quickstart(
     Job job = Job.newBuilder().setPlacement(jobPlacement).setPysparkJob(pySparkJob).build();
 
     // Submit an asynchronous request to execute the job.
-    Job request = jobControllerClient.submitJob(projectId, region, job);
-    String jobId = request.getReference().getJobId();
-    System.out.println(String.format("Submitting job \"%s\"", jobId));
-
-    // Wait for the job to finish.
-    System.out.println(String.format("Job %s finished successfully.", jobId));
-
     OperationFuture<Job, JobMetadata> submitJobAsOperationAsyncRequest =
         jobControllerClient.submitJobAsOperationAsync(projectId, region, job);
-
     Job jobResponse = submitJobAsOperationAsyncRequest.get();
 
     // Print output from Google Cloud Storage.
diff --git a/dataproc/snippets/src/main/java/SubmitJob.java b/dataproc/snippets/src/main/java/SubmitJob.java
index d1f727e8671..667f38638fc 100644
--- a/dataproc/snippets/src/main/java/SubmitJob.java
+++ b/dataproc/snippets/src/main/java/SubmitJob.java
@@ -17,7 +17,6 @@
 // [START dataproc_submit_job]
 
 import com.google.api.gax.longrunning.OperationFuture;
-import com.google.cloud.dataproc.v1.HadoopJob;
 import com.google.cloud.dataproc.v1.Job;
 import com.google.cloud.dataproc.v1.JobControllerClient;
 import com.google.cloud.dataproc.v1.JobControllerSettings;
@@ -28,8 +27,6 @@
 import com.google.cloud.storage.Storage;
 import com.google.cloud.storage.StorageOptions;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.concurrent.ExecutionException;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
diff --git a/dataproc/snippets/src/test/java/QuickstartTest.java b/dataproc/snippets/src/test/java/QuickstartTest.java
index 2a1afa46cd0..0cbc252481c 100644
--- a/dataproc/snippets/src/test/java/QuickstartTest.java
+++ b/dataproc/snippets/src/test/java/QuickstartTest.java
@@ -92,7 +92,6 @@ public void quickstartTest() throws IOException, InterruptedException {
     String output = bout.toString();
 
     assertThat(output, CoreMatchers.containsString("Cluster created successfully"));
-    assertThat(output, CoreMatchers.containsString("Submitting job"));
     assertThat(output, CoreMatchers.containsString("Job finished successfully:"));
     assertThat(output, CoreMatchers.containsString("successfully deleted"));
   }
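
Note: the change above drops the hand-rolled getJob() polling loop and the duplicate synchronous submitJob() call, leaving only the long-running-operation path: submitJobAsOperationAsync returns an OperationFuture whose get() blocks until the job reaches a terminal state. The following is a minimal, self-contained sketch of that retained pattern, not code from this change; the project, region, cluster name, and gs:// path are placeholder assumptions.

// Minimal sketch of the retained async-submission pattern; all literal values
// below are placeholders, not values taken from the diff.
import com.google.api.gax.longrunning.OperationFuture;
import com.google.cloud.dataproc.v1.Job;
import com.google.cloud.dataproc.v1.JobControllerClient;
import com.google.cloud.dataproc.v1.JobControllerSettings;
import com.google.cloud.dataproc.v1.JobMetadata;
import com.google.cloud.dataproc.v1.JobPlacement;
import com.google.cloud.dataproc.v1.PySparkJob;

public class SubmitPySparkJobSketch {

  public static void main(String[] args) throws Exception {
    String projectId = "my-project";              // placeholder
    String region = "us-central1";                // placeholder
    String clusterName = "my-cluster";            // placeholder
    String jobFilePath = "gs://my-bucket/job.py"; // placeholder

    JobControllerSettings settings =
        JobControllerSettings.newBuilder()
            .setEndpoint(String.format("%s-dataproc.googleapis.com:443", region))
            .build();

    try (JobControllerClient jobControllerClient = JobControllerClient.create(settings)) {
      JobPlacement jobPlacement = JobPlacement.newBuilder().setClusterName(clusterName).build();
      PySparkJob pySparkJob = PySparkJob.newBuilder().setMainPythonFileUri(jobFilePath).build();
      Job job = Job.newBuilder().setPlacement(jobPlacement).setPysparkJob(pySparkJob).build();

      // Submit asynchronously; get() blocks until the job reaches a terminal state,
      // which is what makes a separate getJob() polling loop unnecessary.
      OperationFuture<Job, JobMetadata> submitJobAsOperationAsyncRequest =
          jobControllerClient.submitJobAsOperationAsync(projectId, region, job);
      Job jobResponse = submitJobAsOperationAsyncRequest.get();

      System.out.println("Driver output URI: " + jobResponse.getDriverOutputResourceUri());
    }
  }
}

Because the future only completes once the job is in a terminal state, the quickstart no longer prints a "Submitting job" line, which is why the corresponding assertion is removed from QuickstartTest.java.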