Skip to content

Commit

Permalink
[SPARK-44259][CONNECT][TESTS][FOLLOWUP] No longer initializing `Ammonite` tests for Java 21
Browse files Browse the repository at this point in the history

### What changes were proposed in this pull request?
This PR adds a check condition to the `beforeAll` function of `ReplE2ESuite` so that it no longer initializes the Ammonite test setup when running on a Java version newer than 17 (the guard `SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)` skips initialization on Java 18+, including Java 21).

### Why are the changes needed?
Make the `connect-client-jvm` module tests pass with Java 21 on GitHub Actions (GA).

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
- Pass GitHub Actions
- Checked with GA

**Before**

- https://github.com/apache/spark/actions/runs/5434602425/jobs/9883143909

```
at java.base/java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:200)
	at java.base/sun.nio.ch.FileChannelImpl.endBlocking(FileChannelImpl.java:172)
	at java.base/sun.nio.ch.FileChannelImpl.size(FileChannelImpl.java:430)
	at jdk.zipfs/jdk.nio.zipfs.ZipFileSystem.findEND(ZipFileSystem.java:1255)
	at jdk.zipfs/jdk.nio.zipfs.ZipFileSystem.initCEN(ZipFileSystem.java:1541)
	at jdk.zipfs/jdk.nio.zipfs.ZipFileSystem.<init>(ZipFileSystem.java:179)
	at jdk.zipfs/jdk.nio.zipfs.ZipFileSystemProvider.getZipFileSystem(ZipFileSystemProvider.java:125)
	at jdk.zipfs/jdk.nio.zipfs.ZipFileSystemProvider.newFileSystem(ZipFileSystemProvider.java:106)
	at java.base/java.nio.file.FileSystems.newFileSystem(FileSystems.java:339)
	at java.base/java.nio.file.FileSystems.newFileSystem(FileSystems.java:288)
	at io.github.retronym.java9rtexport.Export.rt(Export.java:60)
	at io.github.retronym.java9rtexport.Export.rtTo(Export.java:88)
	at io.github.retronym.java9rtexport.Export.rtAt(Export.java:100)
	at io.github.retronym.java9rtexport.Export.rtAt(Export.java:105)
	at ammonite.util.Classpath$.classpath(Classpath.scala:76)
	at ammonite.compiler.CompilerLifecycleManager.init(CompilerLifecycleManager.scala:92)
	at ammonite.compiler.CompilerLifecycleManager.preprocess(CompilerLifecycleManager.scala:64)
	at ammonite.interp.Interpreter.compileRunBlock$1(Interpreter.scala:526)
	at ammonite.interp.Interpreter.$anonfun$processAllScriptBlocks$15(Interpreter.scala:587)
	at ammonite.util.Res$Success.flatMap(Res.scala:62)
	at ammonite.interp.Interpreter.$anonfun$processAllScriptBlocks$14(Interpreter.scala:584)
	at ammonite.util.Res$Success.flatMap(Res.scala:62)
	at ammonite.interp.Interpreter.$anonfun$processAllScriptBlocks$12(Interpreter.scala:581)
	at scala.Option.getOrElse(Option.scala:189)
	at ammonite.interp.Interpreter.loop$1(Interpreter.scala:581)
	at ammonite.interp.Interpreter.processAllScriptBlocks(Interpreter.scala:619)
	at ammonite.interp.Interpreter.$anonfun$processModule$6(Interpreter.scala:414)
	at ammonite.util.Catching.flatMap(Res.scala:115)
	at ammonite.interp.Interpreter.$anonfun$processModule$5(Interpreter.scala:405)
	at ammonite.util.Res$Success.flatMap(Res.scala:62)
	at ammonite.interp.Interpreter.processModule(Interpreter.scala:395)
	at ammonite.interp.Interpreter.$anonfun$initializePredef$3(Interpreter.scala:148)
	at ammonite.interp.Interpreter.$anonfun$initializePredef$3$adapted(Interpreter.scala:148)
	at ammonite.interp.PredefInitialization$.$anonfun$apply$2(PredefInitialization.scala:79)
	at ammonite.util.Res$.$anonfun$fold$1(Res.scala:32)
	at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
	at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
	at scala.collection.immutable.List.foldLeft(List.scala:91)
	at ammonite.util.Res$.fold(Res.scala:30)
	at ammonite.interp.PredefInitialization$.apply(PredefInitialization.scala:67)
	at ammonite.interp.Interpreter.initializePredef(Interpreter.scala:150)
	at ammonite.repl.Repl.initializePredef(Repl.scala:144)
	at ammonite.Main.run(Main.scala:224)
	at org.apache.spark.sql.application.ConnectRepl$.doMain(ConnectRepl.scala:104)
	at org.apache.spark.sql.application.ReplE2ESuite$$anon$1.run(ReplE2ESuite.scala:60)
	at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:572)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
	at java.base/java.lang.Thread.run(Thread.java:1583)
	...
[error] Error during tests:
[error] 	Running java with options -classpath /home/runner/work/spark/spark/connector/connect/client/jvm/target/scala-2.12/test-classes:/home/runner/work/spark/spark/connector/connect/client/jvm/target/scala-2.12/spark-connect-client-jvm_2.12-3.5.0-SNAPSHOT.jar:/home/runner/work/spark/spark/connector/connect/common/target/scala-2.12/spark-connect-common_2.12-3.5.0-
...
[error] (connect-client-jvm / Test / test) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 50 s, completed Jul 2, 2023, 4:55:33 AM
[error] running /home/runner/work/spark/spark/build/sbt -Phadoop-3 -Pyarn -Pmesos -Pconnect -Phadoop-cloud -Pkubernetes -Pspark-ganglia-lgpl -Pvolcano sql-kafka-0-10/test connect/test connect-client-jvm/test protobuf/test streaming/test streaming-kafka-0-10/test token-provider-kafka-0-10/test mllib-local/test mllib/test yarn/test network-yarn/test mesos/test kubernetes/test hadoop-cloud/test ; received return code 1
Error: Process completed with exit code 18.
```

The test run was marked as failed on GitHub Actions.

**After**

- https://github.com/LuciferYang/spark/actions/runs/5439928518/jobs/9892364759

```
[info] Run completed in 10 seconds, 973 milliseconds.
[info] Total number of tests run: 858
[info] Suites: completed 22, aborted 0
[info] Tests: succeeded 858, failed 0, canceled 167, ignored 1, pending 0
[info] All tests passed.
```
<img width="1274" alt="image" src="https://github.com/apache/spark/assets/1475305/8f21a8dc-18b1-4663-9698-27513adbc38d">

Closes #41814 from LuciferYang/SPARK-44259-FOLLOWUP.

Lead-authored-by: yangjie01 <[email protected]>
Co-authored-by: YangJie <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
  • Loading branch information
LuciferYang authored and HyukjinKwon committed Jul 5, 2023
1 parent f30ddff commit 5b81de9
Showing 1 changed file with 22 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import java.util.concurrent.{Executors, Semaphore, TimeUnit}
import scala.util.Properties

import org.apache.commons.io.output.ByteArrayOutputStream
import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.scalatest.BeforeAndAfterEach

import org.apache.spark.sql.connect.client.util.{IntegrationTestUtils, RemoteSparkSession}
Expand Down Expand Up @@ -50,26 +51,29 @@ class ReplE2ESuite extends RemoteSparkSession with BeforeAndAfterEach {
}

override def beforeAll(): Unit = {
super.beforeAll()
ammoniteOut = new ByteArrayOutputStream()
testSuiteOut = new PipedOutputStream()
// Connect the `testSuiteOut` and `ammoniteIn` pipes
ammoniteIn = new PipedInputStream(testSuiteOut)
errorStream = new ByteArrayOutputStream()

val args = Array("--port", serverPort.toString)
val task = new Runnable {
override def run(): Unit = {
ConnectRepl.doMain(
args = args,
semaphore = Some(semaphore),
inputStream = ammoniteIn,
outputStream = ammoniteOut,
errorStream = errorStream)
// TODO(SPARK-44121) Remove this check condition
if (SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)) {
super.beforeAll()
ammoniteOut = new ByteArrayOutputStream()
testSuiteOut = new PipedOutputStream()
// Connect the `testSuiteOut` and `ammoniteIn` pipes
ammoniteIn = new PipedInputStream(testSuiteOut)
errorStream = new ByteArrayOutputStream()

val args = Array("--port", serverPort.toString)
val task = new Runnable {
override def run(): Unit = {
ConnectRepl.doMain(
args = args,
semaphore = Some(semaphore),
inputStream = ammoniteIn,
outputStream = ammoniteOut,
errorStream = errorStream)
}
}
}

executorService.submit(task)
executorService.submit(task)
}
}

override def afterAll(): Unit = {
Expand Down

0 comments on commit 5b81de9

Please sign in to comment.