Commit 2beccb6

Merge branch 'apache:master' into master
davidyuan1223 authored Sep 28, 2023
2 parents 0925a4b + 99789a8 commit 2beccb6
Showing 6 changed files with 9 additions and 7 deletions.
.asf.yaml (1 change: 1 addition & 0 deletions)

@@ -30,6 +30,7 @@ github:
     - hive
     - sql
     - kubernetes
+    - hacktoberfest
   enabled_merge_buttons:
     squash: true
     merge: false
.gitattributes (1 change: 1 addition & 0 deletions)

@@ -26,6 +26,7 @@ NOTICE-binary export-ignore
 *.bat text eol=crlf
 *.cmd text eol=crlf
 *.java text eol=lf
+*.md text eol=lf
 *.scala text eol=lf
 *.xml text eol=lf
 *.py text eol=lf
@@ -376,7 +376,7 @@ object SparkSQLEngine extends Logging {
       case i: InterruptedException if !sparkSessionCreated.get =>
         error(
           s"The Engine main thread was interrupted, possibly due to `createSpark` timeout." +
-            s" The `kyuubi.session.engine.initialize.timeout` is ($initTimeout ms) " +
+            s" The `${ENGINE_INIT_TIMEOUT.key}` is ($initTimeout ms) " +
             s" and submitted at $submitTime.",
           i)
       case t: Throwable => error(s"Failed to instantiate SparkSession: ${t.getMessage}", t)
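The only functional change in this hunk is that the error message now interpolates `ENGINE_INIT_TIMEOUT.key` instead of repeating the literal `kyuubi.session.engine.initialize.timeout`. A minimal sketch of that pattern, using a hypothetical ConfigEntry case class and illustrative values rather than Kyuubi's real config builder:

// Sketch only: ConfigEntry and the values below are illustrative stand-ins,
// not Kyuubi's actual config builder.
final case class ConfigEntry(key: String, defaultMs: Long)

object InitTimeoutMessageSketch {
  // Hypothetical stand-in for KyuubiConf.ENGINE_INIT_TIMEOUT.
  val ENGINE_INIT_TIMEOUT: ConfigEntry =
    ConfigEntry("kyuubi.session.engine.initialize.timeout", 180000L)

  // The message is derived from the entry's key, so renaming the key in one
  // place updates every log line (and test expectation) that references it.
  def render(initTimeout: Long, submitTime: Long): String =
    s"The Engine main thread was interrupted, possibly due to `createSpark` timeout." +
      s" The `${ENGINE_INIT_TIMEOUT.key}` is ($initTimeout ms) " +
      s" and submitted at $submitTime."

  def main(args: Array[String]): Unit =
    println(render(ENGINE_INIT_TIMEOUT.defaultMs, System.currentTimeMillis()))
}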
@@ -114,7 +114,7 @@ class SparkEngineSuites extends KyuubiFunSuite {
       }
       assert(SparkSQLEngine.currentEngine.isEmpty)
       val errorMsg = s"The Engine main thread was interrupted, possibly due to `createSpark`" +
-        s" timeout. The `kyuubi.session.engine.initialize.timeout` is ($timeout ms) " +
+        s" timeout. The `${ENGINE_INIT_TIMEOUT.key}` is ($timeout ms) " +
         s" and submitted at $submitTime."
       assert(logAppender.loggingEvents.exists(
         _.getMessage.getFormattedMessage.equals(errorMsg)))
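Because the suite now builds `errorMsg` from the same `ENGINE_INIT_TIMEOUT.key` constant that `SparkSQLEngine` logs, a future rename of the configuration key can no longer leave the production message and the test expectation pointing at different strings.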
@@ -27,7 +27,7 @@ import org.apache.hadoop.net.NetUtils
 import org.apache.kyuubi._
 import org.apache.kyuubi.client.util.BatchUtils._
 import org.apache.kyuubi.config.KyuubiConf
-import org.apache.kyuubi.config.KyuubiConf.FRONTEND_THRIFT_BINARY_BIND_HOST
+import org.apache.kyuubi.config.KyuubiConf._
 import org.apache.kyuubi.engine.{ApplicationInfo, ApplicationManagerInfo, ApplicationOperation, KubernetesApplicationOperation}
 import org.apache.kyuubi.engine.ApplicationState.{FAILED, NOT_FOUND, RUNNING}
 import org.apache.kyuubi.engine.spark.SparkProcessBuilder
@@ -57,9 +57,9 @@ abstract class SparkOnKubernetesSuiteBase
.set("spark.driver.memory", "512M")
.set("spark.kubernetes.driver.request.cores", "250m")
.set("spark.kubernetes.executor.request.cores", "250m")
.set("kyuubi.kubernetes.context", "minikube")
.set("kyuubi.frontend.protocols", "THRIFT_BINARY,REST")
.set("kyuubi.session.engine.initialize.timeout", "PT10M")
.set(KUBERNETES_CONTEXT.key, "minikube")
.set(FRONTEND_PROTOCOLS.key, "THRIFT_BINARY,REST")
.set(ENGINE_INIT_TIMEOUT.key, "PT10M")
}
}

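The same substitution happens in the Kubernetes suite's configuration: the three Kyuubi settings are now keyed by the `KyuubiConf` entries pulled in through the widened wildcard import, so a typo in a key can no longer silently produce a setting the server ignores. A rough sketch of the resulting shape; the diff does not show the suite's actual conf type, so a plain `SparkConf` stands in for it here:

import org.apache.kyuubi.config.KyuubiConf._
import org.apache.spark.SparkConf

object KubernetesSuiteConfSketch {
  def main(args: Array[String]): Unit = {
    // SparkConf is only a stand-in for whatever conf the suite really builds.
    val conf = new SparkConf(false)
      .set("spark.driver.memory", "512M")
      // Keys come from KyuubiConf entries instead of hand-typed strings, so a
      // renamed entry is caught at compile time rather than at test runtime.
      .set(KUBERNETES_CONTEXT.key, "minikube")
      .set(FRONTEND_PROTOCOLS.key, "THRIFT_BINARY,REST")
      .set(ENGINE_INIT_TIMEOUT.key, "PT10M")

    // The values round-trip under the canonical key names.
    assert(conf.get(ENGINE_INIT_TIMEOUT.key) == "PT10M")
  }
}

The YARN suite at the end of this commit makes the same swap for its batch configuration map.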
@@ -198,7 +198,7 @@ class KyuubiOperationYarnClusterSuite extends WithKyuubiServerOnYarn with HiveJD
"spark.submit.deployMode" -> "cluster",
"spark.sql.defaultCatalog=spark_catalog" -> "spark_catalog",
"spark.sql.catalog.spark_catalog.type" -> "invalid_type",
"kyuubi.session.engine.initialize.timeout" -> "PT10M",
ENGINE_INIT_TIMEOUT.key -> "PT10M",
KYUUBI_BATCH_ID_KEY -> UUID.randomUUID().toString))(Map.empty) {
val startTime = System.currentTimeMillis()
val exception = intercept[Exception] {
