diff --git a/.github/workflows/dep.yml b/.github/workflows/dep.yml
index f39e5e6a212..15231f47193 100644
--- a/.github/workflows/dep.yml
+++ b/.github/workflows/dep.yml
@@ -36,7 +36,7 @@ jobs:
name: Dependency check
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: setup java
uses: actions/setup-java@v3
with:
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 55cb6b8b16b..804338abec3 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -31,7 +31,7 @@ jobs:
name: sphinx-build
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: '3.9'
diff --git a/.github/workflows/license.yml b/.github/workflows/license.yml
index 55ef485f8fe..f2a1f5c09a0 100644
--- a/.github/workflows/license.yml
+++ b/.github/workflows/license.yml
@@ -34,7 +34,7 @@ jobs:
name: License
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Setup JDK 8
uses: actions/setup-java@v3
with:
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 74c53ab088e..6c12700d831 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -86,7 +86,7 @@ jobs:
env:
SPARK_LOCAL_IP: localhost
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK ${{ matrix.java }}
@@ -140,7 +140,7 @@ jobs:
spark:
- '3.4'
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK ${{ matrix.java }}
@@ -196,7 +196,7 @@ jobs:
flink-archive: '-Dflink.archive.mirror=https://archive.apache.org/dist/flink/flink-1.18.0 -Dflink.archive.name=flink-1.18.0-bin-scala_2.12.tgz'
comment: 'verify-on-flink-1.18-binary'
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK ${{ matrix.java }}
@@ -242,9 +242,14 @@ jobs:
matrix:
java:
- 8
+ hive-archive: [ "" ]
comment: [ "normal" ]
+ include:
+ - java: 8
+ hive-archive: '-Dhive.archive.mirror=https://archive.apache.org/dist/hive/hive-2.3.9 -Dhive.archive.name=apache-hive-2.3.9-bin.tar.gz'
+ comment: 'verify-on-hive-2.3-binary'
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK ${{ matrix.java }}
@@ -261,8 +266,15 @@ jobs:
- name: Build and test Hive with maven w/o linters
run: |
TEST_MODULES="externals/kyuubi-hive-sql-engine,integration-tests/kyuubi-hive-it"
- ./build/mvn ${MVN_OPT} -pl ${TEST_MODULES} -am clean install -DskipTests
- ./build/mvn ${MVN_OPT} -pl ${TEST_MODULES} test
+ ./build/mvn ${MVN_OPT} ${{ matrix.hive-archive }} -pl ${TEST_MODULES} -am clean install -DskipTests
+          # Hive 2.3.9 ships Derby 10.10.2.0, which may fail to bootstrap on the latest JDK 8
+          # https://github.com/apache/hive/pull/4895
+          if [[ "${{ matrix.hive-archive }}" == *apache-hive-2.3.9-bin.tar.gz* ]]; then
+            HIVE_239_LIB="$PWD/externals/kyuubi-download/target/apache-hive-2.3.9-bin/lib"
+            rm "$HIVE_239_LIB"/derby-*
+            wget https://repo1.maven.org/maven2/org/apache/derby/derby/10.14.2.0/derby-10.14.2.0.jar -P "$HIVE_239_LIB"
+          fi
+ ./build/mvn ${MVN_OPT} ${{ matrix.hive-archive }} -pl ${TEST_MODULES} test
- name: Upload test logs
if: failure()
uses: actions/upload-artifact@v3
@@ -283,7 +295,7 @@ jobs:
- 11
comment: [ "normal" ]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK ${{ matrix.java }}
@@ -322,7 +334,7 @@ jobs:
- 11
comment: [ "normal" ]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK ${{ matrix.java }}
@@ -356,7 +368,7 @@ jobs:
env:
SPARK_LOCAL_IP: localhost
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK 8
@@ -383,12 +395,12 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
# https://github.com/docker/build-push-action
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
- name: Build Kyuubi Docker Image
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v5
with:
# passthrough CI into build container
build-args: |
@@ -455,7 +467,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Cache Engine Archives
uses: ./.github/actions/cache-engine-archives
- name: Setup Minikube
@@ -502,7 +514,7 @@ jobs:
zookeeper: ["3.4", "3.5", "3.6", "3.7" ]
comment: [ "normal" ]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK ${{ matrix.java }}
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 5ff634da6d8..b06858abdc7 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -33,7 +33,7 @@ jobs:
env:
SPARK_LOCAL_IP: localhost
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Setup JDK 8
diff --git a/.github/workflows/publish-snapshot-docker.yml b/.github/workflows/publish-snapshot-docker.yml
index 3afccee7aa8..0a73dcc2da5 100644
--- a/.github/workflows/publish-snapshot-docker.yml
+++ b/.github/workflows/publish-snapshot-docker.yml
@@ -28,18 +28,18 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up QEMU
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and Push Kyuubi Docker Image
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
# build cache on Github Actions, See: https://docs.docker.com/build/cache/backends/gha/#using-dockerbuild-push-action
cache-from: type=gha
diff --git a/.github/workflows/publish-snapshot-nexus.yml b/.github/workflows/publish-snapshot-nexus.yml
index b4191396b1f..3b8280e00f3 100644
--- a/.github/workflows/publish-snapshot-nexus.yml
+++ b/.github/workflows/publish-snapshot-nexus.yml
@@ -43,7 +43,7 @@ jobs:
profiles: -Pflink-provided,spark-provided,hive-provided,spark-3.4
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ matrix.branch }}
- name: Setup JDK 8
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index 87823ddbd20..80c819b3930 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -37,7 +37,7 @@ jobs:
- '-Pflink-provided,hive-provided,spark-provided,spark-block-cleaner,spark-3.5,spark-3.4,spark-3.3,spark-3.2,tpcds,kubernetes-it'
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup JDK 8
@@ -114,7 +114,7 @@ jobs:
name: Super Linter and Shellcheck
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Super Linter Checks
uses: github/super-linter/slim@v5
env:
diff --git a/.github/workflows/web-ui.yml b/.github/workflows/web-ui.yml
index 9de7a599d45..868defcf4e2 100644
--- a/.github/workflows/web-ui.yml
+++ b/.github/workflows/web-ui.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup JDK 8
uses: actions/setup-java@v3
with:
diff --git a/docs/configuration/settings.md b/docs/configuration/settings.md
index 435416d0bdf..580ad170055 100644
--- a/docs/configuration/settings.md
+++ b/docs/configuration/settings.md
@@ -150,7 +150,7 @@ You can configure the Kyuubi properties in `$KYUUBI_HOME/conf/kyuubi-defaults.co
| kyuubi.engine.jdbc.connection.password | <undefined> | The password is used for connecting to server | string | 1.6.0 |
| kyuubi.engine.jdbc.connection.propagateCredential | false | Whether to use the session's user and password to connect to database | boolean | 1.8.0 |
| kyuubi.engine.jdbc.connection.properties || The additional properties are used for connecting to server | seq | 1.6.0 |
-| kyuubi.engine.jdbc.connection.provider | <undefined> | The connection provider is used for getting a connection from the server | string | 1.6.0 |
+| kyuubi.engine.jdbc.connection.provider | <undefined> | A JDBC connection provider plugin for the Kyuubi Server to establish a connection to the JDBC URL. The configuration value should be a subclass of `org.apache.kyuubi.engine.jdbc.connection.JdbcConnectionProvider`. Kyuubi provides the following built-in implementations: doris: For establishing Doris connections. mysql: For establishing MySQL connections. phoenix: For establishing Phoenix connections. postgresql: For establishing PostgreSQL connections. | string | 1.6.0 |
| kyuubi.engine.jdbc.connection.url | <undefined> | The server url that engine will connect to | string | 1.6.0 |
| kyuubi.engine.jdbc.connection.user | <undefined> | The user is used for connecting to server | string | 1.6.0 |
| kyuubi.engine.jdbc.driver.class | <undefined> | The driver class for JDBC engine connection | string | 1.6.0 |
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json
index c45e638902d..3918186ac80 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json
@@ -4,7 +4,8 @@
"fieldName" : "child",
"fieldExtractor" : "ResolvedNamespaceDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "ALTERDATABASE",
"uriDescs" : [ ]
@@ -14,26 +15,31 @@
"fieldName" : "name",
"fieldExtractor" : "ResolvedDBObjectNameDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "namespace",
"fieldExtractor" : "StringSeqDatabaseExtractor",
"catalogDesc" : {
"fieldName" : "catalog",
- "fieldExtractor" : "CatalogPluginCatalogExtractor"
+ "fieldExtractor" : "CatalogPluginCatalogExtractor",
+ "comment" : ""
},
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "name",
"fieldExtractor" : "ResolvedNamespaceDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "CREATEDATABASE",
"uriDescs" : [ {
"fieldName" : "properties",
"fieldExtractor" : "PropertiesLocationUriExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.DescribeNamespace",
@@ -41,7 +47,8 @@
"fieldName" : "namespace",
"fieldExtractor" : "ResolvedNamespaceDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"opType" : "DESCDATABASE",
"uriDescs" : [ ]
@@ -51,7 +58,8 @@
"fieldName" : "namespace",
"fieldExtractor" : "ResolvedNamespaceDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "DROPDATABASE",
"uriDescs" : [ ]
@@ -61,20 +69,24 @@
"fieldName" : "child",
"fieldExtractor" : "ResolvedNamespaceDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
}, {
"fieldName" : "child",
"fieldExtractor" : "ResolvedDBObjectNameDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
}, {
"fieldName" : "namespace",
"fieldExtractor" : "StringSeqOptionDatabaseExtractor",
"catalogDesc" : {
"fieldName" : "catalogName",
- "fieldExtractor" : "StringOptionCatalogExtractor"
+ "fieldExtractor" : "StringOptionCatalogExtractor",
+ "comment" : ""
},
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"opType" : "SWITCHDATABASE",
"uriDescs" : [ ]
@@ -84,13 +96,15 @@
"fieldName" : "namespace",
"fieldExtractor" : "ResolvedNamespaceDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "ALTERDATABASE_LOCATION",
"uriDescs" : [ {
"fieldName" : "location",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.SetNamespaceProperties",
@@ -98,7 +112,8 @@
"fieldName" : "namespace",
"fieldExtractor" : "ResolvedNamespaceDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "ALTERDATABASE",
"uriDescs" : [ ]
@@ -108,7 +123,8 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "ALTERDATABASE",
"uriDescs" : [ ]
@@ -118,13 +134,15 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "ALTERDATABASE_LOCATION",
"uriDescs" : [ {
"fieldName" : "location",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AnalyzeTablesCommand",
@@ -132,7 +150,8 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringOptionDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"opType" : "ANALYZE_TABLE",
"uriDescs" : [ ]
@@ -142,13 +161,15 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "CREATEDATABASE",
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.DescribeDatabaseCommand",
@@ -156,7 +177,8 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"opType" : "DESCDATABASE",
"uriDescs" : [ ]
@@ -166,7 +188,8 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "DROPDATABASE",
"uriDescs" : [ ]
@@ -176,7 +199,8 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"opType" : "SWITCHDATABASE",
"uriDescs" : [ ]
@@ -186,8 +210,9 @@
"fieldName" : "namespace",
"fieldExtractor" : "StringSeqDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"opType" : "SWITCHDATABASE",
"uriDescs" : [ ]
-} ]
\ No newline at end of file
+} ]
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/function_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/function_command_spec.json
index 0b71245d218..8644f9860ca 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/function_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/function_command_spec.json
@@ -7,9 +7,11 @@
"functionTypeDesc" : {
"fieldName" : "isTemp",
"fieldExtractor" : "TempMarkerFunctionTypeExtractor",
- "skipTypes" : [ "TEMP" ]
+ "skipTypes" : [ "TEMP" ],
+ "comment" : ""
},
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "functionName",
"fieldExtractor" : "StringFunctionExtractor",
@@ -17,14 +19,17 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringOptionDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
},
"functionTypeDesc" : {
"fieldName" : "isTemp",
"fieldExtractor" : "TempMarkerFunctionTypeExtractor",
- "skipTypes" : [ "TEMP" ]
+ "skipTypes" : [ "TEMP" ],
+ "comment" : ""
},
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "CREATEFUNCTION"
}, {
@@ -36,9 +41,11 @@
"functionTypeDesc" : {
"fieldName" : "info",
"fieldExtractor" : "ExpressionInfoFunctionTypeExtractor",
- "skipTypes" : [ "TEMP", "SYSTEM" ]
+ "skipTypes" : [ "TEMP", "SYSTEM" ],
+ "comment" : ""
},
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
}, {
"fieldName" : "functionName",
"fieldExtractor" : "FunctionIdentifierFunctionExtractor",
@@ -46,9 +53,11 @@
"functionTypeDesc" : {
"fieldName" : "functionName",
"fieldExtractor" : "FunctionIdentifierFunctionTypeExtractor",
- "skipTypes" : [ "TEMP", "SYSTEM" ]
+ "skipTypes" : [ "TEMP", "SYSTEM" ],
+ "comment" : ""
},
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"opType" : "DESCFUNCTION"
}, {
@@ -60,9 +69,11 @@
"functionTypeDesc" : {
"fieldName" : "isTemp",
"fieldExtractor" : "TempMarkerFunctionTypeExtractor",
- "skipTypes" : [ "TEMP" ]
+ "skipTypes" : [ "TEMP" ],
+ "comment" : ""
},
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "functionName",
"fieldExtractor" : "StringFunctionExtractor",
@@ -70,14 +81,17 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringOptionDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
},
"functionTypeDesc" : {
"fieldName" : "isTemp",
"fieldExtractor" : "TempMarkerFunctionTypeExtractor",
- "skipTypes" : [ "TEMP" ]
+ "skipTypes" : [ "TEMP" ],
+ "comment" : ""
},
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "DROPFUNCTION"
}, {
@@ -89,10 +103,12 @@
"fieldName" : "databaseName",
"fieldExtractor" : "StringOptionDatabaseExtractor",
"catalogDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
},
"functionTypeDesc" : null,
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ],
"opType" : "RELOADFUNCTION"
-} ]
\ No newline at end of file
+} ]
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/scan_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/scan_command_spec.json
index 75510a57775..ba4d790e84c 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/scan_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/scan_command_spec.json
@@ -3,7 +3,8 @@
"scanDescs" : [ {
"fieldName" : "catalogTable",
"fieldExtractor" : "CatalogTableTableExtractor",
- "catalogDesc" : null
+ "catalogDesc" : null,
+ "comment" : ""
} ],
"functionDescs" : [ ],
"uriDescs" : [ ]
@@ -12,7 +13,8 @@
"scanDescs" : [ {
"fieldName" : "tableMeta",
"fieldExtractor" : "CatalogTableTableExtractor",
- "catalogDesc" : null
+ "catalogDesc" : null,
+ "comment" : ""
} ],
"functionDescs" : [ ],
"uriDescs" : [ ]
@@ -21,20 +23,23 @@
"scanDescs" : [ {
"fieldName" : "catalogTable",
"fieldExtractor" : "CatalogTableOptionTableExtractor",
- "catalogDesc" : null
+ "catalogDesc" : null,
+ "comment" : ""
} ],
"functionDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "relation",
"fieldExtractor" : "BaseRelationFileIndexURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation",
"scanDescs" : [ {
"fieldName" : null,
"fieldExtractor" : "DataSourceV2RelationTableExtractor",
- "catalogDesc" : null
+ "catalogDesc" : null,
+ "comment" : ""
} ],
"functionDescs" : [ ],
"uriDescs" : [ ]
@@ -48,9 +53,11 @@
"functionTypeDesc" : {
"fieldName" : "name",
"fieldExtractor" : "FunctionNameFunctionTypeExtractor",
- "skipTypes" : [ "TEMP", "SYSTEM" ]
+ "skipTypes" : [ "TEMP", "SYSTEM" ],
+ "comment" : ""
},
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -63,9 +70,11 @@
"functionTypeDesc" : {
"fieldName" : "name",
"fieldExtractor" : "FunctionNameFunctionTypeExtractor",
- "skipTypes" : [ "TEMP", "SYSTEM" ]
+ "skipTypes" : [ "TEMP", "SYSTEM" ],
+ "comment" : ""
},
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -78,9 +87,11 @@
"functionTypeDesc" : {
"fieldName" : "name",
"fieldExtractor" : "FunctionNameFunctionTypeExtractor",
- "skipTypes" : [ "TEMP", "SYSTEM" ]
+ "skipTypes" : [ "TEMP", "SYSTEM" ],
+ "comment" : ""
},
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -93,9 +104,11 @@
"functionTypeDesc" : {
"fieldName" : "name",
"fieldExtractor" : "FunctionNameFunctionTypeExtractor",
- "skipTypes" : [ "TEMP", "SYSTEM" ]
+ "skipTypes" : [ "TEMP", "SYSTEM" ],
+ "comment" : ""
},
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ],
"uriDescs" : [ ]
-} ]
\ No newline at end of file
+} ]
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index eaaa6203d6b..27b205309cf 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -8,7 +8,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
@@ -27,7 +28,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_ADDPARTS",
"queryDescs" : [ ],
@@ -42,7 +44,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
@@ -61,7 +64,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -79,22 +83,26 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "INSERT"
+ "actionType" : "INSERT",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "table",
"fieldExtractor" : "DataSourceV2RelationURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.CacheTable",
@@ -102,7 +110,8 @@
"opType" : "CREATEVIEW",
"queryDescs" : [ {
"fieldName" : "table",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -111,7 +120,8 @@
"opType" : "CREATEVIEW",
"queryDescs" : [ {
"fieldName" : "plan",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -124,7 +134,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -139,7 +150,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableName",
"fieldExtractor" : "IdentifierTableExtractor",
@@ -148,10 +160,12 @@
"tableTypeDesc" : null,
"catalogDesc" : {
"fieldName" : "catalog",
- "fieldExtractor" : "CatalogPluginCatalogExtractor"
+ "fieldExtractor" : "CatalogPluginCatalogExtractor",
+ "comment" : ""
},
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "child",
"fieldExtractor" : "ResolvedDbObjectNameTableExtractor",
@@ -160,22 +174,26 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "tableSpec",
"fieldExtractor" : "TableSpecURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "properties",
"fieldExtractor" : "PropertiesLocationUriExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "tableName",
"fieldExtractor" : "IdentifierURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateTableAsSelect",
@@ -187,7 +205,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableName",
"fieldExtractor" : "IdentifierTableExtractor",
@@ -196,10 +215,12 @@
"tableTypeDesc" : null,
"catalogDesc" : {
"fieldName" : "catalog",
- "fieldExtractor" : "CatalogPluginCatalogExtractor"
+ "fieldExtractor" : "CatalogPluginCatalogExtractor",
+ "comment" : ""
},
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "name",
"fieldExtractor" : "ResolvedDbObjectNameTableExtractor",
@@ -208,21 +229,25 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE_AS_SELECT",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "tableSpec",
"fieldExtractor" : "TableSpecURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "properties",
"fieldExtractor" : "PropertiesLocationUriExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateV2Table",
@@ -234,21 +259,25 @@
"tableTypeDesc" : null,
"catalogDesc" : {
"fieldName" : "catalog",
- "fieldExtractor" : "CatalogPluginCatalogExtractor"
+ "fieldExtractor" : "CatalogPluginCatalogExtractor",
+ "comment" : ""
},
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "properties",
"fieldExtractor" : "PropertiesLocationUriExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "tableName",
"fieldExtractor" : "IdentifierURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable",
@@ -259,12 +288,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -279,7 +310,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
} ],
"opType" : "DESCTABLE",
"queryDescs" : [ ],
@@ -294,7 +326,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
@@ -313,7 +346,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_DROPPARTS",
"queryDescs" : [ ],
@@ -328,7 +362,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "child",
"fieldExtractor" : "ResolvedTableTableExtractor",
@@ -337,7 +372,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "DROPTABLE",
"queryDescs" : [ ],
@@ -351,17 +387,20 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "sourceTable",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -373,22 +412,26 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "INSERT_OVERWRITE"
+ "actionType" : "INSERT_OVERWRITE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "table",
"fieldExtractor" : "DataSourceV2RelationURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.OverwritePartitionsDynamic",
@@ -399,22 +442,26 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "INSERT_OVERWRITE"
+ "actionType" : "INSERT_OVERWRITE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "table",
"fieldExtractor" : "DataSourceV2RelationURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.RefreshTable",
@@ -426,7 +473,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -441,7 +489,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_RENAMECOL",
"queryDescs" : [ ],
@@ -460,7 +509,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_RENAMEPART",
"queryDescs" : [ ],
@@ -475,7 +525,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "MSCK",
"queryDescs" : [ ],
@@ -490,7 +541,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_REPLACECOLS",
"queryDescs" : [ ],
@@ -508,17 +560,20 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -531,7 +586,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableName",
"fieldExtractor" : "IdentifierTableExtractor",
@@ -540,10 +596,12 @@
"tableTypeDesc" : null,
"catalogDesc" : {
"fieldName" : "catalog",
- "fieldExtractor" : "CatalogPluginCatalogExtractor"
+ "fieldExtractor" : "CatalogPluginCatalogExtractor",
+ "comment" : ""
},
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "child",
"fieldExtractor" : "ResolvedDbObjectNameTableExtractor",
@@ -552,22 +610,26 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "tableSpec",
"fieldExtractor" : "TableSpecURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "properties",
"fieldExtractor" : "PropertiesLocationUriExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "tableName",
"fieldExtractor" : "IdentifierURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceTableAsSelect",
@@ -579,7 +641,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableName",
"fieldExtractor" : "IdentifierTableExtractor",
@@ -588,10 +651,12 @@
"tableTypeDesc" : null,
"catalogDesc" : {
"fieldName" : "catalog",
- "fieldExtractor" : "CatalogPluginCatalogExtractor"
+ "fieldExtractor" : "CatalogPluginCatalogExtractor",
+ "comment" : ""
},
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "name",
"fieldExtractor" : "ResolvedDbObjectNameTableExtractor",
@@ -600,21 +665,25 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE_AS_SELECT",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "tableSpec",
"fieldExtractor" : "TableSpecURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
}, {
"fieldName" : "properties",
"fieldExtractor" : "PropertiesLocationUriExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.SetTableProperties",
@@ -626,7 +695,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -645,7 +715,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "SHOW_CREATETABLE",
"queryDescs" : [ ],
@@ -660,7 +731,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "SHOW_TBLPROPERTIES",
"queryDescs" : [ ],
@@ -675,7 +747,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_DROPPARTS",
"queryDescs" : [ ],
@@ -690,7 +763,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "TRUNCATETABLE",
"queryDescs" : [ ],
@@ -704,12 +778,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -722,7 +798,8 @@
"uriDescs" : [ {
"fieldName" : "paths",
"fieldExtractor" : "StringSeqURIExtractor",
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddFileCommand",
@@ -732,7 +809,8 @@
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddFilesCommand",
@@ -742,7 +820,8 @@
"uriDescs" : [ {
"fieldName" : "paths",
"fieldExtractor" : "StringSeqURIExtractor",
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddJarCommand",
@@ -752,7 +831,8 @@
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AddJarsCommand",
@@ -762,7 +842,8 @@
"uriDescs" : [ {
"fieldName" : "paths",
"fieldExtractor" : "StringSeqURIExtractor",
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AlterTableAddColumnsCommand",
@@ -771,13 +852,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "colsToAdd",
- "fieldExtractor" : "StructFieldSeqColumnExtractor"
+ "fieldExtractor" : "StructFieldSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
@@ -789,20 +872,23 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partitionSpecsAndLocs",
- "fieldExtractor" : "PartitionLocsSeqColumnExtractor"
+ "fieldExtractor" : "PartitionLocsSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_ADDPARTS",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "partitionSpecsAndLocs",
"fieldExtractor" : "PartitionLocsSeqURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AlterTableChangeColumnCommand",
@@ -811,13 +897,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "columnName",
- "fieldExtractor" : "StringColumnExtractor"
+ "fieldExtractor" : "StringColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_REPLACECOLS",
"queryDescs" : [ ],
@@ -829,13 +917,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "specs",
- "fieldExtractor" : "PartitionSeqColumnExtractor"
+ "fieldExtractor" : "PartitionSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_DROPPARTS",
"queryDescs" : [ ],
@@ -850,7 +940,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "MSCK",
"queryDescs" : [ ],
@@ -865,11 +956,13 @@
"tableTypeDesc" : {
"fieldName" : "oldName",
"fieldExtractor" : "TableIdentifierTableTypeExtractor",
- "skipTypes" : [ "TEMP_VIEW" ]
+ "skipTypes" : [ "TEMP_VIEW" ],
+ "comment" : ""
},
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_RENAME",
"queryDescs" : [ ],
@@ -881,13 +974,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "oldPartition",
- "fieldExtractor" : "PartitionColumnExtractor"
+ "fieldExtractor" : "PartitionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_RENAMEPART",
"queryDescs" : [ ],
@@ -899,13 +994,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partSpec",
- "fieldExtractor" : "PartitionOptionColumnExtractor"
+ "fieldExtractor" : "PartitionOptionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_SERDEPROPERTIES",
"queryDescs" : [ ],
@@ -917,20 +1014,23 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partitionSpec",
- "fieldExtractor" : "PartitionOptionColumnExtractor"
+ "fieldExtractor" : "PartitionOptionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_LOCATION",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "location",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.AlterTableSetPropertiesCommand",
@@ -942,7 +1042,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -957,7 +1058,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -972,16 +1074,19 @@
"tableTypeDesc" : {
"fieldName" : "name",
"fieldExtractor" : "TableIdentifierTableTypeExtractor",
- "skipTypes" : [ "TEMP_VIEW" ]
+ "skipTypes" : [ "TEMP_VIEW" ],
+ "comment" : ""
},
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERVIEW_AS",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -994,31 +1099,36 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "columnNames",
- "fieldExtractor" : "StringSeqColumnExtractor"
+ "fieldExtractor" : "StringSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "columnNames",
- "fieldExtractor" : "StringSeqOptionColumnExtractor"
+ "fieldExtractor" : "StringSeqOptionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -1033,19 +1143,22 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partitionSpec",
- "fieldExtractor" : "PartitionColumnExtractor"
+ "fieldExtractor" : "PartitionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -1060,7 +1173,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
}, {
"fieldName" : "tableIdent",
"fieldExtractor" : "TableIdentifierTableExtractor",
@@ -1069,7 +1183,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -1080,7 +1195,8 @@
"opType" : "CREATEVIEW",
"queryDescs" : [ {
"fieldName" : "plan",
- "fieldExtractor" : "LogicalPlanOptionQueryExtractor"
+ "fieldExtractor" : "LogicalPlanOptionQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1093,17 +1209,20 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
} ],
"opType" : "CREATETABLE_AS_SELECT",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "table",
"fieldExtractor" : "CatalogTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.CreateDataSourceTableCommand",
@@ -1115,14 +1234,16 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "table",
"fieldExtractor" : "CatalogTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.CreateTableCommand",
@@ -1134,14 +1255,16 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "table",
"fieldExtractor" : "CatalogTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.CreateTableLikeCommand",
@@ -1153,7 +1276,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
}, {
"fieldName" : "sourceTable",
"fieldExtractor" : "TableIdentifierTableExtractor",
@@ -1162,14 +1286,16 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "fileFormat",
"fieldExtractor" : "CatalogStorageFormatURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.CreateViewCommand",
@@ -1181,19 +1307,23 @@
"tableTypeDesc" : {
"fieldName" : "viewType",
"fieldExtractor" : "ViewTypeTableTypeExtractor",
- "skipTypes" : [ "TEMP_VIEW", "GLOBAL_TEMP_VIEW" ]
+ "skipTypes" : [ "TEMP_VIEW", "GLOBAL_TEMP_VIEW" ],
+ "comment" : ""
},
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATEVIEW",
"queryDescs" : [ {
"fieldName" : "plan",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
}, {
"fieldName" : "child",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1203,13 +1333,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "colNameParts",
- "fieldExtractor" : "StringSeqLastColumnExtractor"
+ "fieldExtractor" : "StringSeqLastColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "DESCTABLE",
"queryDescs" : [ ],
@@ -1221,13 +1353,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partitionSpec",
- "fieldExtractor" : "PartitionColumnExtractor"
+ "fieldExtractor" : "PartitionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
} ],
"opType" : "DESCTABLE",
"queryDescs" : [ ],
@@ -1242,11 +1376,13 @@
"tableTypeDesc" : {
"fieldName" : "tableName",
"fieldExtractor" : "TableIdentifierTableTypeExtractor",
- "skipTypes" : [ "TEMP_VIEW" ]
+ "skipTypes" : [ "TEMP_VIEW" ],
+ "comment" : ""
},
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "DROPTABLE",
"queryDescs" : [ ],
@@ -1257,12 +1393,14 @@
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "storage",
"fieldExtractor" : "CatalogStorageFormatURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.LoadDataCommand",
@@ -1271,24 +1409,28 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partition",
- "fieldExtractor" : "PartitionOptionColumnExtractor"
+ "fieldExtractor" : "PartitionOptionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : {
"fieldName" : "isOverwrite",
"fieldExtractor" : "OverwriteOrInsertActionTypeExtractor",
- "actionType" : null
+ "actionType" : null,
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "LOAD",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : true
+ "isInput" : true,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.command.RefreshTableCommand",
@@ -1300,7 +1442,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -1315,7 +1458,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "MSCK",
"queryDescs" : [ ],
@@ -1330,7 +1474,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "SHOWCOLUMNS",
"queryDescs" : [ ],
@@ -1345,7 +1490,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "SHOW_CREATETABLE",
"queryDescs" : [ ],
@@ -1360,7 +1506,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "SHOW_CREATETABLE",
"queryDescs" : [ ],
@@ -1372,13 +1519,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "spec",
- "fieldExtractor" : "PartitionOptionColumnExtractor"
+ "fieldExtractor" : "PartitionOptionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "SHOWPARTITIONS",
"queryDescs" : [ ],
@@ -1393,7 +1542,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "SHOW_TBLPROPERTIES",
"queryDescs" : [ ],
@@ -1405,13 +1555,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partitionSpec",
- "fieldExtractor" : "PartitionOptionColumnExtractor"
+ "fieldExtractor" : "PartitionOptionColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "TRUNCATETABLE",
"queryDescs" : [ ],
@@ -1426,17 +1578,20 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanOptionQueryExtractor"
+ "fieldExtractor" : "LogicalPlanOptionQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "tableDesc",
"fieldExtractor" : "CatalogTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.execution.datasources.CreateTempViewUsing",
@@ -1453,17 +1608,20 @@
"actionTypeDesc" : {
"fieldName" : "overwrite",
"fieldExtractor" : "OverwriteOrInsertActionTypeExtractor",
- "actionType" : null
+ "actionType" : null,
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1473,22 +1631,26 @@
"fieldExtractor" : "CatalogTableOptionTableExtractor",
"columnDesc" : {
"fieldName" : "outputColumnNames",
- "fieldExtractor" : "StringSeqColumnExtractor"
+ "fieldExtractor" : "StringSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : {
"fieldName" : "mode",
"fieldExtractor" : "SaveModeActionTypeExtractor",
- "actionType" : null
+ "actionType" : null,
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1501,7 +1663,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -1512,12 +1675,14 @@
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "options",
"fieldExtractor" : "PropertiesPathUriExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand",
@@ -1526,23 +1691,27 @@
"fieldExtractor" : "CatalogTableTableExtractor",
"columnDesc" : {
"fieldName" : "outputColumnNames",
- "fieldExtractor" : "StringSeqColumnExtractor"
+ "fieldExtractor" : "StringSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE_AS_SELECT",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "tableDesc",
"fieldExtractor" : "CatalogTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
@@ -1550,12 +1719,14 @@
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "storage",
"fieldExtractor" : "CatalogStorageFormatURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveTable",
@@ -1564,22 +1735,26 @@
"fieldExtractor" : "CatalogTableTableExtractor",
"columnDesc" : {
"fieldName" : "outputColumnNames",
- "fieldExtractor" : "StringSeqColumnExtractor"
+ "fieldExtractor" : "StringSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : {
"fieldName" : "overwrite",
"fieldExtractor" : "OverwriteOrInsertActionTypeExtractor",
- "actionType" : null
+ "actionType" : null,
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1589,23 +1764,27 @@
"fieldExtractor" : "CatalogTableTableExtractor",
"columnDesc" : {
"fieldName" : "outputColumnNames",
- "fieldExtractor" : "StringSeqColumnExtractor"
+ "fieldExtractor" : "StringSeqColumnExtractor",
+ "comment" : ""
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "CREATETABLE_AS_SELECT",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ {
"fieldName" : "tableDesc",
"fieldExtractor" : "CatalogTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : ""
} ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.Call",
@@ -1617,7 +1796,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Iceberg"
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -1631,12 +1811,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Iceberg"
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -1650,17 +1832,20 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Iceberg"
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "sourceTable",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1672,17 +1857,20 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Iceberg"
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "sourceTable",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1694,12 +1882,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Iceberg"
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -1711,13 +1901,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "colsToAdd",
- "fieldExtractor" : "StructFieldSeqColumnExtractor"
+ "fieldExtractor" : "StructFieldSeqColumnExtractor",
+ "comment" : "Hudi"
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "ALTERTABLE_ADDCOLS",
"queryDescs" : [ ],
@@ -1729,13 +1921,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "columnName",
- "fieldExtractor" : "StringColumnExtractor"
+ "fieldExtractor" : "StringColumnExtractor",
+ "comment" : "Hudi"
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "ALTERTABLE_REPLACECOLS",
"queryDescs" : [ ],
@@ -1747,13 +1941,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partitionSpecs",
- "fieldExtractor" : "PartitionSeqColumnExtractor"
+ "fieldExtractor" : "PartitionSeqColumnExtractor",
+ "comment" : "Hudi"
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "ALTERTABLE_DROPPARTS",
"queryDescs" : [ ],
@@ -1768,11 +1964,13 @@
"tableTypeDesc" : {
"fieldName" : "oldName",
"fieldExtractor" : "TableIdentifierTableTypeExtractor",
- "skipTypes" : [ "TEMP_VIEW" ]
+ "skipTypes" : [ "TEMP_VIEW" ],
+ "comment" : "Hudi"
},
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "ALTERTABLE_RENAME",
"queryDescs" : [ ],
@@ -1787,7 +1985,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -1801,12 +2000,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "OTHER"
+ "actionType" : "OTHER",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
}, {
"fieldName" : "clone",
"fieldExtractor" : "HudiCallProcedureOutputTableExtractor",
@@ -1814,12 +2015,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : ""
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : ""
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -1832,7 +2035,8 @@
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Hudi"
} ]
}, {
"classname" : "org.apache.spark.sql.hudi.command.CompactionHoodieTableCommand",
@@ -1844,7 +2048,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
@@ -1857,7 +2062,8 @@
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : true
+ "isInput" : true,
+ "comment" : "Hudi"
} ]
}, {
"classname" : "org.apache.spark.sql.hudi.command.CompactionShowHoodieTableCommand",
@@ -1869,7 +2075,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "SHOW_TBLPROPERTIES",
"queryDescs" : [ ],
@@ -1884,12 +2091,14 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "CREATETABLE_AS_SELECT",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -1902,7 +2111,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
@@ -1917,7 +2127,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : "Hudi"
}, {
"fieldName" : "sourceTable",
"fieldExtractor" : "TableIdentifierTableExtractor",
@@ -1926,7 +2137,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : true
+ "setCurrentDatabaseIfMissing" : true,
+ "comment" : "Hudi"
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
@@ -1941,7 +2153,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "CREATEINDEX",
"queryDescs" : [ ],
@@ -1955,12 +2168,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : "Hudi"
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -1975,11 +2190,13 @@
"tableTypeDesc" : {
"fieldName" : "tableIdentifier",
"fieldExtractor" : "TableIdentifierTableTypeExtractor",
- "skipTypes" : [ "TEMP_VIEW" ]
+ "skipTypes" : [ "TEMP_VIEW" ],
+ "comment" : "Hudi"
},
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "DROPTABLE",
"queryDescs" : [ ],
@@ -1994,7 +2211,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "DROPINDEX",
"queryDescs" : [ ],
@@ -2008,17 +2226,20 @@
"actionTypeDesc" : {
"fieldName" : "overwrite",
"fieldExtractor" : "OverwriteOrInsertActionTypeExtractor",
- "actionType" : null
+ "actionType" : null,
+ "comment" : "Hudi"
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "query",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : ""
} ],
"uriDescs" : [ ]
}, {
@@ -2030,17 +2251,20 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : "Hudi"
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "mergeInto",
- "fieldExtractor" : "HudiMergeIntoSourceTableExtractor"
+ "fieldExtractor" : "HudiMergeIntoSourceTableExtractor",
+ "comment" : "Hudi"
} ],
"uriDescs" : [ ]
}, {
@@ -2053,7 +2277,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "ALTERINDEX_REBUILD",
"queryDescs" : [ ],
@@ -2068,7 +2293,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : ""
} ],
"opType" : "MSCK",
"queryDescs" : [ ],
@@ -2080,13 +2306,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "specOpt",
- "fieldExtractor" : "PartitionOptionColumnExtractor"
+ "fieldExtractor" : "PartitionOptionColumnExtractor",
+ "comment" : "Hudi"
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "SHOWPARTITIONS",
"queryDescs" : [ ],
@@ -2101,7 +2329,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : true,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "SHOWINDEXES",
"queryDescs" : [ ],
@@ -2116,7 +2345,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "ALTERTABLE_PROPERTIES",
"queryDescs" : [ ],
@@ -2128,13 +2358,15 @@
"fieldExtractor" : "TableIdentifierTableExtractor",
"columnDesc" : {
"fieldName" : "partitionSpec",
- "fieldExtractor" : "PartitionOptionColumnExtractor"
+ "fieldExtractor" : "PartitionOptionColumnExtractor",
+ "comment" : "Hudi"
},
"actionTypeDesc" : null,
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "TRUNCATETABLE",
"queryDescs" : [ ],
@@ -2148,12 +2380,14 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : "Hudi"
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Hudi"
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
@@ -2168,7 +2402,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Delta"
}, {
"fieldName" : "table",
"fieldExtractor" : "TableIdentifierOptionTableExtractor",
@@ -2177,22 +2412,26 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Delta"
} ],
"opType" : "MSCK",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "child",
"fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
}, {
"fieldName" : "table",
"fieldExtractor" : "TableIdentifierOptionURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
}, {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
} ]
}, {
"classname" : "org.apache.spark.sql.delta.commands.DeleteCommand",
@@ -2203,19 +2442,22 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : "Delta"
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Delta"
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "target",
"fieldExtractor" : "SubqueryAliasURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
} ]
}, {
"classname" : "org.apache.spark.sql.delta.commands.MergeIntoCommand",
@@ -2226,22 +2468,26 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : "Delta"
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Delta"
} ],
"opType" : "QUERY",
"queryDescs" : [ {
"fieldName" : "source",
- "fieldExtractor" : "LogicalPlanQueryExtractor"
+ "fieldExtractor" : "LogicalPlanQueryExtractor",
+ "comment" : "Delta"
} ],
"uriDescs" : [ {
"fieldName" : "target",
"fieldExtractor" : "SubqueryAliasURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
} ]
}, {
"classname" : "org.apache.spark.sql.delta.commands.OptimizeTableCommand",
@@ -2253,7 +2499,8 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Delta"
}, {
"fieldName" : "tableId",
"fieldExtractor" : "TableIdentifierOptionTableExtractor",
@@ -2262,22 +2509,26 @@
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Delta"
} ],
"opType" : "ALTERTABLE_COMPACT",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "child",
"fieldExtractor" : "ResolvedTableURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
}, {
"fieldName" : "tableId",
"fieldExtractor" : "TableIdentifierOptionURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
}, {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
} ]
}, {
"classname" : "org.apache.spark.sql.delta.commands.UpdateCommand",
@@ -2288,18 +2539,21 @@
"actionTypeDesc" : {
"fieldName" : null,
"fieldExtractor" : null,
- "actionType" : "UPDATE"
+ "actionType" : "UPDATE",
+ "comment" : "Delta"
},
"tableTypeDesc" : null,
"catalogDesc" : null,
"isInput" : false,
- "setCurrentDatabaseIfMissing" : false
+ "setCurrentDatabaseIfMissing" : false,
+ "comment" : "Delta"
} ],
"opType" : "QUERY",
"queryDescs" : [ ],
"uriDescs" : [ {
"fieldName" : "target",
"fieldExtractor" : "SubqueryAliasURIExtractor",
- "isInput" : false
+ "isInput" : false,
+ "comment" : "Delta"
} ]
-} ]
\ No newline at end of file
+} ]
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index 81dfa6e4ecd..d0f6e48ebe3 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -29,6 +29,7 @@ import org.apache.kyuubi.plugin.spark.authz.OperationType.OperationType
import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
import org.apache.kyuubi.plugin.spark.authz.rule.Authorization._
import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMarker
+import org.apache.kyuubi.plugin.spark.authz.rule.rowfilter._
import org.apache.kyuubi.plugin.spark.authz.serde._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
@@ -303,6 +304,14 @@ object PrivilegesBuilder {
val inputObjs = new ArrayBuffer[PrivilegeObject]
val outputObjs = new ArrayBuffer[PrivilegeObject]
val opType = plan match {
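+ // Commands rewritten by the row-filter / object-filter rules keep their original semantics,
+ // so map the placeholder and Filtered* plans back to their corresponding SHOW operation types.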
+ case ObjectFilterPlaceHolder(child) if child.nodeName == "ShowTables" =>
+ OperationType.SHOWTABLES
+ case ObjectFilterPlaceHolder(child) if child.nodeName == "ShowNamespaces" =>
+ OperationType.SHOWDATABASES
+ case _: FilteredShowTablesCommand => OperationType.SHOWTABLES
+ case _: FilteredShowFunctionsCommand => OperationType.SHOWFUNCTIONS
+ case _: FilteredShowColumnsCommand => OperationType.SHOWCOLUMNS
+
// ExplainCommand run will execute the plan, should avoid check privilege for the plan.
case _: ExplainCommand =>
setExplainCommandExecutionId(spark)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
index afb4f7c54fd..c5c39c51197 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
@@ -35,10 +35,6 @@ class RuleAuthorization(spark: SparkSession) extends Authorization(spark) {
val ugi = getAuthzUgi(spark.sparkContext)
val (inputs, outputs, opType) = PrivilegesBuilder.build(plan, spark)
val requests = new ArrayBuffer[AccessRequest]()
- if (inputs.isEmpty && opType == OperationType.SHOWDATABASES) {
- val resource = AccessResource(DATABASE, null, None)
- requests += AccessRequest(resource, ugi, opType, AccessType.USE)
- }
def addAccessRequest(objects: Iterable[PrivilegeObject], isInput: Boolean): Unit = {
objects.foreach { obj =>
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
index 2ffad1a2f81..4c0cf2a141d 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
@@ -54,11 +54,15 @@ sealed trait Descriptor {
*/
def extract(v: AnyRef): AnyRef
+ def comment: String
+
final def error(v: AnyRef, e: Throwable): String = {
val resourceName = getClass.getSimpleName.stripSuffix("Desc")
val objectClass = v.getClass.getName
s"[Spark$SPARK_VERSION] failed to get $resourceName from $objectClass by" +
- s" $fieldExtractor/$fieldName, due to ${e.getMessage}"
+ s" $fieldExtractor/$fieldName, " +
+ (if (comment.nonEmpty) s"desc comment: $comment, " else "") +
+ s"due to ${e.getMessage}"
}
}
@@ -70,7 +74,8 @@ sealed trait Descriptor {
*/
case class ColumnDesc(
fieldName: String,
- fieldExtractor: String) extends Descriptor {
+ fieldExtractor: String,
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Seq[String] = {
val columnsVal = invokeAs[AnyRef](v, fieldName)
val columnExtractor = lookupExtractor[ColumnExtractor](fieldExtractor)
@@ -89,7 +94,8 @@ case class DatabaseDesc(
fieldName: String,
fieldExtractor: String,
catalogDesc: Option[CatalogDesc] = None,
- isInput: Boolean = false) extends Descriptor {
+ isInput: Boolean = false,
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Database = {
val databaseVal = invokeAs[AnyRef](v, fieldName)
val databaseExtractor = lookupExtractor[DatabaseExtractor](fieldExtractor)
@@ -113,7 +119,8 @@ case class DatabaseDesc(
case class FunctionTypeDesc(
fieldName: String,
fieldExtractor: String,
- skipTypes: Seq[String]) extends Descriptor {
+ skipTypes: Seq[String],
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): FunctionType = {
extract(v, SparkSession.active)
}
@@ -143,7 +150,8 @@ case class FunctionDesc(
fieldExtractor: String,
databaseDesc: Option[DatabaseDesc] = None,
functionTypeDesc: Option[FunctionTypeDesc] = None,
- isInput: Boolean = false) extends Descriptor {
+ isInput: Boolean = false,
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Function = {
val functionVal = invokeAs[AnyRef](v, fieldName)
val functionExtractor = lookupExtractor[FunctionExtractor](fieldExtractor)
@@ -168,7 +176,8 @@ case class FunctionDesc(
*/
case class QueryDesc(
fieldName: String,
- fieldExtractor: String = "LogicalPlanQueryExtractor") extends Descriptor {
+ fieldExtractor: String = "LogicalPlanQueryExtractor",
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Option[LogicalPlan] = {
val queryVal = invokeAs[AnyRef](v, fieldName)
val queryExtractor = lookupExtractor[QueryExtractor](fieldExtractor)
@@ -186,7 +195,8 @@ case class QueryDesc(
case class TableTypeDesc(
fieldName: String,
fieldExtractor: String,
- skipTypes: Seq[String]) extends Descriptor {
+ skipTypes: Seq[String],
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): TableType = {
extract(v, SparkSession.active)
}
@@ -224,7 +234,8 @@ case class TableDesc(
tableTypeDesc: Option[TableTypeDesc] = None,
catalogDesc: Option[CatalogDesc] = None,
isInput: Boolean = false,
- setCurrentDatabaseIfMissing: Boolean = false) extends Descriptor {
+ setCurrentDatabaseIfMissing: Boolean = false,
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Option[Table] = {
extract(v, SparkSession.active)
}
@@ -254,7 +265,8 @@ case class TableDesc(
case class ActionTypeDesc(
fieldName: String = null,
fieldExtractor: String = null,
- actionType: Option[String] = None) extends Descriptor {
+ actionType: Option[String] = None,
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): PrivilegeObjectActionType = {
actionType.map(PrivilegeObjectActionType.withName).getOrElse {
val actionTypeVal = invokeAs[AnyRef](v, fieldName)
@@ -272,7 +284,8 @@ case class ActionTypeDesc(
*/
case class CatalogDesc(
fieldName: String = "catalog",
- fieldExtractor: String = "CatalogPluginCatalogExtractor") extends Descriptor {
+ fieldExtractor: String = "CatalogPluginCatalogExtractor",
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Option[String] = {
val catalogVal = invokeAs[AnyRef](v, fieldName)
val catalogExtractor = lookupExtractor[CatalogExtractor](fieldExtractor)
@@ -283,7 +296,8 @@ case class CatalogDesc(
case class ScanDesc(
fieldName: String,
fieldExtractor: String,
- catalogDesc: Option[CatalogDesc] = None) extends Descriptor {
+ catalogDesc: Option[CatalogDesc] = None,
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Option[Table] = {
extract(v, SparkSession.active)
}
@@ -317,7 +331,8 @@ case class ScanDesc(
case class UriDesc(
fieldName: String,
fieldExtractor: String,
- isInput: Boolean = false) extends Descriptor {
+ isInput: Boolean = false,
+ comment: String = "") extends Descriptor {
override def extract(v: AnyRef): Seq[Uri] = {
extract(v, SparkSession.active)
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DeltaCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DeltaCommands.scala
index d40caae7eba..12f434a50c1 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DeltaCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DeltaCommands.scala
@@ -25,12 +25,13 @@ object DeltaCommands extends CommandSpecs[TableCommandSpec] {
val DeleteCommand = {
val cmd = "org.apache.spark.sql.delta.commands.DeleteCommand"
- val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
+ val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE), comment = "Delta")
val tableDesc = TableDesc(
"target",
classOf[SubqueryAliasTableExtractor],
- actionTypeDesc = Some(actionTypeDesc))
- val uriDescs = Seq(UriDesc("target", classOf[SubqueryAliasURIExtractor]))
+ actionTypeDesc = Some(actionTypeDesc),
+ comment = "Delta")
+ val uriDescs = Seq(UriDesc("target", classOf[SubqueryAliasURIExtractor], comment = "Delta"))
TableCommandSpec(cmd, Seq(tableDesc), uriDescs = uriDescs)
}
@@ -41,29 +42,31 @@ object DeltaCommands extends CommandSpecs[TableCommandSpec] {
val MergeIntoCommand = {
val cmd = "org.apache.spark.sql.delta.commands.MergeIntoCommand"
- val queryDesc = QueryDesc("source")
+ val queryDesc = QueryDesc("source", comment = "Delta")
DeleteCommand.copy(classname = cmd, queryDescs = Seq(queryDesc))
}
val OptimizeTableCommand = {
val cmd = "org.apache.spark.sql.delta.commands.OptimizeTableCommand"
- val childDesc = TableDesc("child", classOf[ResolvedTableTableExtractor])
- val tableDesc = TableDesc("tableId", classOf[TableIdentifierOptionTableExtractor])
+ val childDesc = TableDesc("child", classOf[ResolvedTableTableExtractor], comment = "Delta")
+ val tableDesc =
+ TableDesc("tableId", classOf[TableIdentifierOptionTableExtractor], comment = "Delta")
val uriDescs = Seq(
- UriDesc("child", classOf[ResolvedTableURIExtractor]),
- UriDesc("tableId", classOf[TableIdentifierOptionURIExtractor]),
- UriDesc("path", classOf[StringURIExtractor]))
+ UriDesc("child", classOf[ResolvedTableURIExtractor], comment = "Delta"),
+ UriDesc("tableId", classOf[TableIdentifierOptionURIExtractor], comment = "Delta"),
+ UriDesc("path", classOf[StringURIExtractor], comment = "Delta"))
TableCommandSpec(cmd, Seq(childDesc, tableDesc), ALTERTABLE_COMPACT, uriDescs = uriDescs)
}
val VacuumTableCommand = {
val cmd = "io.delta.tables.execution.VacuumTableCommand"
- val childDesc = TableDesc("child", classOf[ResolvedTableTableExtractor])
- val tableDesc = TableDesc("table", classOf[TableIdentifierOptionTableExtractor])
+ val childDesc = TableDesc("child", classOf[ResolvedTableTableExtractor], comment = "Delta")
+ val tableDesc =
+ TableDesc("table", classOf[TableIdentifierOptionTableExtractor], comment = "Delta")
val uriDescs = Seq(
- UriDesc("child", classOf[ResolvedTableURIExtractor]),
- UriDesc("table", classOf[TableIdentifierOptionURIExtractor]),
- UriDesc("path", classOf[StringURIExtractor]))
+ UriDesc("child", classOf[ResolvedTableURIExtractor], comment = "Delta"),
+ UriDesc("table", classOf[TableIdentifierOptionURIExtractor], comment = "Delta"),
+ UriDesc("path", classOf[StringURIExtractor], comment = "Delta"))
TableCommandSpec(cmd, Seq(childDesc, tableDesc), MSCK, uriDescs = uriDescs)
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
index 381f8081ac5..87fc509b5d0 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
@@ -25,24 +25,38 @@ import org.apache.kyuubi.plugin.spark.authz.serde.TableType._
object HudiCommands extends CommandSpecs[TableCommandSpec] {
val AlterHoodieTableAddColumnsCommand = {
val cmd = "org.apache.spark.sql.hudi.command.AlterHoodieTableAddColumnsCommand"
- val columnDesc = ColumnDesc("colsToAdd", classOf[StructFieldSeqColumnExtractor])
- val tableDesc = TableDesc("tableId", classOf[TableIdentifierTableExtractor], Some(columnDesc))
+ val columnDesc =
+ ColumnDesc("colsToAdd", classOf[StructFieldSeqColumnExtractor], comment = "Hudi")
+ val tableDesc = TableDesc(
+ "tableId",
+ classOf[TableIdentifierTableExtractor],
+ Some(columnDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_ADDCOLS)
}
val AlterHoodieTableChangeColumnCommand = {
val cmd = "org.apache.spark.sql.hudi.command.AlterHoodieTableChangeColumnCommand"
- val columnDesc = ColumnDesc("columnName", classOf[StringColumnExtractor])
+ val columnDesc = ColumnDesc("columnName", classOf[StringColumnExtractor], comment = "Hudi")
val tableDesc =
- TableDesc("tableIdentifier", classOf[TableIdentifierTableExtractor], Some(columnDesc))
+ TableDesc(
+ "tableIdentifier",
+ classOf[TableIdentifierTableExtractor],
+ Some(columnDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_REPLACECOLS)
}
val AlterHoodieTableDropPartitionCommand = {
val cmd = "org.apache.spark.sql.hudi.command.AlterHoodieTableDropPartitionCommand"
- val columnDesc = ColumnDesc("partitionSpecs", classOf[PartitionSeqColumnExtractor])
+ val columnDesc =
+ ColumnDesc("partitionSpecs", classOf[PartitionSeqColumnExtractor], comment = "Hudi")
val tableDesc =
- TableDesc("tableIdentifier", classOf[TableIdentifierTableExtractor], Some(columnDesc))
+ TableDesc(
+ "tableIdentifier",
+ classOf[TableIdentifierTableExtractor],
+ Some(columnDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_DROPPARTS)
}
@@ -52,30 +66,32 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
TableTypeDesc(
"oldName",
classOf[TableIdentifierTableTypeExtractor],
- Seq(TEMP_VIEW))
+ Seq(TEMP_VIEW),
+ comment = "Hudi")
val oldTableD = TableDesc(
"oldName",
classOf[TableIdentifierTableExtractor],
- tableTypeDesc = Some(oldTableTableTypeDesc))
+ tableTypeDesc = Some(oldTableTableTypeDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(oldTableD), ALTERTABLE_RENAME)
}
val AlterTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.AlterTableCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], None)
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], None, comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
}
val Spark31AlterTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.Spark31AlterTableCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], None)
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], None, comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
}
val CreateHoodieTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.CreateHoodieTableCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
}
@@ -92,12 +108,14 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
val tableDesc1 = TableDesc(
"targetTable",
classOf[TableIdentifierTableExtractor],
- setCurrentDatabaseIfMissing = true)
+ setCurrentDatabaseIfMissing = true,
+ comment = "Hudi")
val tableDesc2 = TableDesc(
"sourceTable",
classOf[TableIdentifierTableExtractor],
isInput = true,
- setCurrentDatabaseIfMissing = true)
+ setCurrentDatabaseIfMissing = true,
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc1, tableDesc2), CREATETABLE)
}
@@ -107,7 +125,8 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
TableTypeDesc(
"tableIdentifier",
classOf[TableIdentifierTableTypeExtractor],
- Seq(TEMP_VIEW))
+ Seq(TEMP_VIEW),
+ comment = "Hudi")
TableCommandSpec(
cmd,
Seq(TableDesc(
@@ -124,30 +143,33 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
val TruncateHoodieTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.TruncateHoodieTableCommand"
- val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionOptionColumnExtractor])
+ val columnDesc =
+ ColumnDesc("partitionSpec", classOf[PartitionOptionColumnExtractor], comment = "Hudi")
val tableDesc =
TableDesc(
"tableIdentifier",
classOf[TableIdentifierTableExtractor],
- columnDesc = Some(columnDesc))
+ columnDesc = Some(columnDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), TRUNCATETABLE)
}
val CompactionHoodieTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.CompactionHoodieTableCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
}
val CompactionShowHoodieTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.CompactionShowHoodieTableCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], isInput = true)
+ val tableDesc =
+ TableDesc("table", classOf[CatalogTableTableExtractor], isInput = true, comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), SHOW_TBLPROPERTIES)
}
val CompactionHoodiePathCommand = {
val cmd = "org.apache.spark.sql.hudi.command.CompactionHoodiePathCommand"
- val uriDesc = UriDesc("path", classOf[StringURIExtractor])
+ val uriDesc = UriDesc("path", classOf[StringURIExtractor], comment = "Hudi")
TableCommandSpec(
cmd,
Seq.empty,
@@ -157,31 +179,32 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
val CompactionShowHoodiePathCommand = {
val cmd = "org.apache.spark.sql.hudi.command.CompactionShowHoodiePathCommand"
- val uriDesc = UriDesc("path", classOf[StringURIExtractor], isInput = true)
+ val uriDesc = UriDesc("path", classOf[StringURIExtractor], isInput = true, comment = "Hudi")
TableCommandSpec(cmd, Seq.empty, SHOW_TBLPROPERTIES, uriDescs = Seq(uriDesc))
}
val CreateIndexCommand = {
val cmd = "org.apache.spark.sql.hudi.command.CreateIndexCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), CREATEINDEX)
}
val DropIndexCommand = {
val cmd = "org.apache.spark.sql.hudi.command.DropIndexCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), DROPINDEX)
}
val ShowIndexCommand = {
val cmd = "org.apache.spark.sql.hudi.command.ShowIndexesCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], isInput = true)
+ val tableDesc =
+ TableDesc("table", classOf[CatalogTableTableExtractor], isInput = true, comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), SHOWINDEXES)
}
val RefreshIndexCommand = {
val cmd = "org.apache.spark.sql.hudi.command.RefreshIndexCommand"
- val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
+ val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor], comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), ALTERINDEX_REBUILD)
}
@@ -191,52 +214,62 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
"logicalRelation",
classOf[LogicalRelationTableExtractor],
actionTypeDesc =
- Some(ActionTypeDesc("overwrite", classOf[OverwriteOrInsertActionTypeExtractor])))
+ Some(ActionTypeDesc(
+ "overwrite",
+ classOf[OverwriteOrInsertActionTypeExtractor],
+ comment = "Hudi")),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
}
val ShowHoodieTablePartitionsCommand = {
val cmd = "org.apache.spark.sql.hudi.command.ShowHoodieTablePartitionsCommand"
- val columnDesc = ColumnDesc("specOpt", classOf[PartitionOptionColumnExtractor])
+ val columnDesc =
+ ColumnDesc("specOpt", classOf[PartitionOptionColumnExtractor], comment = "Hudi")
val tableDesc = TableDesc(
"tableIdentifier",
classOf[TableIdentifierTableExtractor],
isInput = true,
- columnDesc = Some(columnDesc))
+ columnDesc = Some(columnDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), SHOWPARTITIONS)
}
val DeleteHoodieTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.DeleteHoodieTableCommand"
- val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
+ val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE), comment = "Hudi")
val tableDesc =
TableDesc(
"dft",
classOf[HudiDataSourceV2RelationTableExtractor],
- actionTypeDesc = Some(actionTypeDesc))
+ actionTypeDesc = Some(actionTypeDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc))
}
val UpdateHoodieTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.UpdateHoodieTableCommand"
- val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
+ val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE), comment = "Hudi")
val tableDesc =
TableDesc(
"ut",
classOf[HudiDataSourceV2RelationTableExtractor],
- actionTypeDesc = Some(actionTypeDesc))
+ actionTypeDesc = Some(actionTypeDesc),
+ comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc))
}
val MergeIntoHoodieTableCommand = {
val cmd = "org.apache.spark.sql.hudi.command.MergeIntoHoodieTableCommand"
- val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
+ val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE), comment = "Hudi")
val tableDesc =
TableDesc(
"mergeInto",
classOf[HudiMergeIntoTargetTableExtractor],
- actionTypeDesc = Some(actionTypeDesc))
- val queryDescs = QueryDesc("mergeInto", classOf[HudiMergeIntoSourceTableExtractor])
+ actionTypeDesc = Some(actionTypeDesc),
+ comment = "Hudi")
+ val queryDescs =
+ QueryDesc("mergeInto", classOf[HudiMergeIntoSourceTableExtractor], comment = "Hudi")
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDescs))
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
index 59f8eb7a607..33e94d718c2 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
@@ -30,7 +30,8 @@ object IcebergCommands extends CommandSpecs[TableCommandSpec] {
TableDesc(
"table",
classOf[DataSourceV2RelationTableExtractor],
- actionTypeDesc = Some(actionTypeDesc))
+ actionTypeDesc = Some(actionTypeDesc),
+ comment = "Iceberg")
TableCommandSpec(cmd, Seq(tableDesc))
}
@@ -45,14 +46,15 @@ object IcebergCommands extends CommandSpecs[TableCommandSpec] {
val tableDesc = TableDesc(
"targetTable",
classOf[DataSourceV2RelationTableExtractor],
- actionTypeDesc = Some(actionTypeDesc))
+ actionTypeDesc = Some(actionTypeDesc),
+ comment = "Iceberg")
val queryDesc = QueryDesc("sourceTable")
TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
}
val CallProcedure = {
val cmd = "org.apache.spark.sql.catalyst.plans.logical.Call"
- val td = TableDesc("args", classOf[ExpressionSeqTableExtractor])
+ val td = TableDesc("args", classOf[ExpressionSeqTableExtractor], comment = "Iceberg")
TableCommandSpec(cmd, Seq(td), opType = OperationType.ALTERTABLE_PROPERTIES)
}
diff --git a/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/session/FlinkSessionImpl.scala b/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/session/FlinkSessionImpl.scala
index 5f8f0b8c049..5bfacc694da 100644
--- a/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/session/FlinkSessionImpl.scala
+++ b/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/session/FlinkSessionImpl.scala
@@ -109,6 +109,7 @@ class FlinkSessionImpl(
case TGetInfoType.CLI_SERVER_NAME | TGetInfoType.CLI_DBMS_NAME =>
TGetInfoValue.stringValue("Apache Flink")
case TGetInfoType.CLI_DBMS_VER => TGetInfoValue.stringValue(EnvironmentInformation.getVersion)
+ case TGetInfoType.CLI_ODBC_KEYWORDS => TGetInfoValue.stringValue("Unimplemented")
case _ => throw KyuubiSQLException(s"Unrecognized GetInfoType value: $infoType")
}
}
diff --git a/externals/kyuubi-hive-sql-engine/pom.xml b/externals/kyuubi-hive-sql-engine/pom.xml
index caed7e27c37..ad7a7676330 100644
--- a/externals/kyuubi-hive-sql-engine/pom.xml
+++ b/externals/kyuubi-hive-sql-engine/pom.xml
@@ -50,18 +50,6 @@
${project.version}
-
- org.apache.hive
- hive-service-rpc
- provided
-
-
-
- org.apache.thrift
- libfb303
- provided
-
-
com.google.code.findbugs
jsr305
@@ -73,12 +61,6 @@
commons-collections
-
- org.apache.thrift
- libthrift
- provided
-
-
com.google.guava
failureaccess
@@ -185,6 +167,7 @@
com.fasterxml.jackson.core:jackson-core
com.fasterxml.jackson.core:jackson-databind
com.fasterxml.jackson.module:jackson-module-scala_${scala.binary.version}
+ org.apache.hive:hive-service-rpc
org.apache.kyuubi:*
@@ -206,6 +189,18 @@
+
+
+ com.fasterxml.jackson
+ ${kyuubi.shade.packageName}.com.fasterxml.jackson
+
+ com.fasterxml.jackson.**
+
+
+
+
+
+
diff --git a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala
index 3cc426c435a..3e6b8729db1 100644
--- a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala
+++ b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala
@@ -130,7 +130,15 @@ object HiveSQLEngine extends Logging {
} else {
val effectiveUser = UserGroupInformation.createProxyUser(sessionUser.get, realUser)
effectiveUser.doAs(new PrivilegedExceptionAction[Unit] {
- override def run(): Unit = startEngine()
+ override def run(): Unit = {
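+ // Apply any delegation tokens passed down from the Kyuubi server before starting the engine,
+ // so the proxy user can authenticate with secured services such as the Hive metastore.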
+ val engineCredentials =
+ kyuubiConf.getOption(KyuubiReservedKeys.KYUUBI_ENGINE_CREDENTIALS_KEY)
+ kyuubiConf.unset(KyuubiReservedKeys.KYUUBI_ENGINE_CREDENTIALS_KEY)
+ engineCredentials.filter(_.nonEmpty).foreach { credentials =>
+ HiveTBinaryFrontendService.renewDelegationToken(credentials)
+ }
+ startEngine()
+ }
})
}
diff --git a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveTBinaryFrontendService.scala b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveTBinaryFrontendService.scala
index d7cc801d3f6..19356d7c645 100644
--- a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveTBinaryFrontendService.scala
+++ b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveTBinaryFrontendService.scala
@@ -17,11 +17,19 @@
package org.apache.kyuubi.engine.hive
+import org.apache.hadoop.io.Text
+import org.apache.hadoop.security.UserGroupInformation
+import org.apache.hive.service.rpc.thrift.{TRenewDelegationTokenReq, TRenewDelegationTokenResp}
+
+import org.apache.kyuubi.KyuubiSQLException
import org.apache.kyuubi.ha.client.{EngineServiceDiscovery, ServiceDiscovery}
import org.apache.kyuubi.service.{Serverable, Service, TBinaryFrontendService}
+import org.apache.kyuubi.service.TFrontendService.OK_STATUS
+import org.apache.kyuubi.util.KyuubiHadoopUtils
class HiveTBinaryFrontendService(override val serverable: Serverable)
extends TBinaryFrontendService("HiveTBinaryFrontend") {
+ import HiveTBinaryFrontendService._
override lazy val discoveryService: Option[Service] = {
if (ServiceDiscovery.supportServiceDiscovery(conf)) {
@@ -30,4 +38,39 @@ class HiveTBinaryFrontendService(override val serverable: Serverable)
None
}
}
+
+ override def RenewDelegationToken(req: TRenewDelegationTokenReq): TRenewDelegationTokenResp = {
+ debug(req.toString)
+
+ // We hacked `TCLIService.Iface.RenewDelegationToken` to transfer Credentials from Kyuubi
+ // Server to Hive SQL engine
+ val resp = new TRenewDelegationTokenResp()
+ try {
+ renewDelegationToken(req.getDelegationToken)
+ resp.setStatus(OK_STATUS)
+ } catch {
+ case e: Exception =>
+ warn("Error renewing delegation tokens", e)
+ resp.setStatus(KyuubiSQLException.toTStatus(e))
+ }
+ resp
+ }
+}
+
+object HiveTBinaryFrontendService {
+
+ def renewDelegationToken(tokenStr: String): Unit = {
+ val currentUser = UserGroupInformation.getCurrentUser
+ // `currentUser` is either `UserGroupInformation.getLoginUser` or a proxy user.
+ // If `currentUser` is a proxy user, it needs a HIVE_DELEGATION_TOKEN to pass
+ // HiveMetastoreClient authentication.
+ if (currentUser.getAuthenticationMethod == UserGroupInformation.AuthenticationMethod.PROXY) {
+ val newCreds = KyuubiHadoopUtils.decodeCredentials(tokenStr)
+ KyuubiHadoopUtils.getTokenMap(newCreds).values
+ .find(_.getKind == new Text("HIVE_DELEGATION_TOKEN"))
+ .foreach { token =>
+ UserGroupInformation.getCurrentUser.addToken(token)
+ }
+ }
+ }
}
diff --git a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/session/HiveSessionManager.scala b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/session/HiveSessionManager.scala
index d09912770cc..c2e8e793da2 100644
--- a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/session/HiveSessionManager.scala
+++ b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/session/HiveSessionManager.scala
@@ -18,13 +18,16 @@
package org.apache.kyuubi.engine.hive.session
import java.io.File
+import java.util.{List => JList}
import java.util.concurrent.Future
import scala.collection.JavaConverters._
+import scala.language.reflectiveCalls
import org.apache.hadoop.hive.conf.HiveConf
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hive.service.cli.{SessionHandle => ImportedSessionHandle}
-import org.apache.hive.service.cli.session.{HiveSessionImplwithUGI => ImportedHiveSessionImpl, HiveSessionProxy, SessionManager => ImportedHiveSessionManager}
+import org.apache.hive.service.cli.session.{HiveSessionImpl => ImportedHiveSessionImpl, HiveSessionImplwithUGI => ImportedHiveSessionImplwithUGI, HiveSessionProxy, SessionManager => ImportedHiveSessionManager}
import org.apache.hive.service.rpc.thrift.TProtocolVersion
import org.apache.kyuubi.config.KyuubiConf.ENGINE_SHARE_LEVEL
@@ -34,6 +37,7 @@ import org.apache.kyuubi.engine.hive.HiveSQLEngine
import org.apache.kyuubi.engine.hive.operation.HiveOperationManager
import org.apache.kyuubi.operation.OperationManager
import org.apache.kyuubi.session.{Session, SessionHandle, SessionManager}
+import org.apache.kyuubi.util.reflect.DynConstructors
class HiveSessionManager(engine: HiveSQLEngine) extends SessionManager("HiveSessionManager") {
override protected def isServer: Boolean = false
@@ -42,11 +46,14 @@ class HiveSessionManager(engine: HiveSQLEngine) extends SessionManager("HiveSess
private val internalSessionManager = new ImportedHiveSessionManager(null) {
+ var doAsEnabled: Boolean = _
+
/**
* Avoid unnecessary hive initialization
*/
override def init(hiveConf: HiveConf): Unit = {
// this.hiveConf = hiveConf
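+ // Capture HIVE_SERVER2_ENABLE_DOAS here so openSession can choose between the proxy-user
+ // session (HiveSessionImplwithUGI) and the plain HiveSessionImpl.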
+ this.doAsEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)
}
/**
@@ -77,19 +84,68 @@ class HiveSessionManager(engine: HiveSQLEngine) extends SessionManager("HiveSess
getSessionOption).getOrElse {
val sessionHandle =
conf.get(KYUUBI_SESSION_HANDLE_KEY).map(SessionHandle.fromUUID).getOrElse(SessionHandle())
- val hive = {
- val sessionWithUGI = new ImportedHiveSessionImpl(
- new ImportedSessionHandle(sessionHandle.toTSessionHandle, protocol),
- protocol,
- user,
- password,
- HiveSQLEngine.hiveConf,
- ipAddress,
- null,
- Seq(ipAddress).asJava)
+ val hive = if (internalSessionManager.doAsEnabled) {
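+ // doAs enabled: build a UGI-backed session and wrap it in a proxy so operations run as the
+ // connecting user; the constructor signature differs between Hive 2.3 and 3.1.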
+ val sessionWithUGI = DynConstructors.builder()
+ .impl( // for Hive 3.1
+ classOf[ImportedHiveSessionImplwithUGI],
+ classOf[ImportedSessionHandle],
+ classOf[TProtocolVersion],
+ classOf[String],
+ classOf[String],
+ classOf[HiveConf],
+ classOf[String],
+ classOf[String],
+ classOf[JList[String]])
+ .impl( // for Hive 2.3
+ classOf[ImportedHiveSessionImplwithUGI],
+ classOf[ImportedSessionHandle],
+ classOf[TProtocolVersion],
+ classOf[String],
+ classOf[String],
+ classOf[HiveConf],
+ classOf[String],
+ classOf[String])
+ .build[ImportedHiveSessionImplwithUGI]()
+ .newInstance(
+ new ImportedSessionHandle(sessionHandle.toTSessionHandle, protocol),
+ protocol,
+ user,
+ password,
+ HiveSQLEngine.hiveConf,
+ ipAddress,
+ null,
+ Seq(ipAddress).asJava)
val proxy = HiveSessionProxy.getProxy(sessionWithUGI, sessionWithUGI.getSessionUgi)
sessionWithUGI.setProxySession(proxy)
proxy
+ } else {
+ DynConstructors.builder()
+ .impl( // for Hive 3.1
+ classOf[ImportedHiveSessionImpl],
+ classOf[ImportedSessionHandle],
+ classOf[TProtocolVersion],
+ classOf[String],
+ classOf[String],
+ classOf[HiveConf],
+ classOf[String],
+ classOf[JList[String]])
+ .impl( // for Hive 2.3
+ classOf[ImportedHiveSessionImpl],
+ classOf[ImportedSessionHandle],
+ classOf[TProtocolVersion],
+ classOf[String],
+ classOf[String],
+ classOf[HiveConf],
+ classOf[String])
+ .build[ImportedHiveSessionImpl]()
+ .newInstance(
+ new ImportedSessionHandle(sessionHandle.toTSessionHandle, protocol),
+ protocol,
+ user,
+ password,
+ HiveSQLEngine.hiveConf,
+ ipAddress,
+ Seq(ipAddress).asJava)
}
hive.setSessionManager(internalSessionManager)
hive.setOperationManager(internalSessionManager.getOperationManager)
diff --git a/externals/kyuubi-jdbc-engine/pom.xml b/externals/kyuubi-jdbc-engine/pom.xml
index 69870c27870..0ec905f202d 100644
--- a/externals/kyuubi-jdbc-engine/pom.xml
+++ b/externals/kyuubi-jdbc-engine/pom.xml
@@ -58,6 +58,12 @@
test
+
+ com.dimafeng
+ testcontainers-scala-mysql_${scala.binary.version}
+ test
+
+
org.apache.kyuubi
${hive.jdbc.artifact}
diff --git a/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.connection.JdbcConnectionProvider b/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.connection.JdbcConnectionProvider
index db146a42cd6..1a7ac9467c4 100644
--- a/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.connection.JdbcConnectionProvider
+++ b/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.connection.JdbcConnectionProvider
@@ -16,5 +16,6 @@
#
org.apache.kyuubi.engine.jdbc.doris.DorisConnectionProvider
+org.apache.kyuubi.engine.jdbc.mysql.MySQLConnectionProvider
org.apache.kyuubi.engine.jdbc.phoenix.PhoenixConnectionProvider
org.apache.kyuubi.engine.jdbc.postgresql.PostgreSQLConnectionProvider
diff --git a/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.dialect.JdbcDialect b/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.dialect.JdbcDialect
index 1529f859824..9f97ab5d728 100644
--- a/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.dialect.JdbcDialect
+++ b/externals/kyuubi-jdbc-engine/src/main/resources/META-INF/services/org.apache.kyuubi.engine.jdbc.dialect.JdbcDialect
@@ -16,5 +16,6 @@
#
org.apache.kyuubi.engine.jdbc.dialect.DorisDialect
+org.apache.kyuubi.engine.jdbc.dialect.MySQLDialect
org.apache.kyuubi.engine.jdbc.dialect.PhoenixDialect
org.apache.kyuubi.engine.jdbc.dialect.PostgreSQLDialect
diff --git a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/connection/ConnectionProvider.scala b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/connection/ConnectionProvider.scala
index cb6e4b6c551..f8ec72dca93 100644
--- a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/connection/ConnectionProvider.scala
+++ b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/connection/ConnectionProvider.scala
@@ -27,7 +27,7 @@ import org.apache.kyuubi.util.reflect.ReflectUtils._
abstract class AbstractConnectionProvider extends Logging {
protected val providers = loadProviders()
- def getProviderClass(kyuubiConf: KyuubiConf): String = {
+ def getDriverClass(kyuubiConf: KyuubiConf): String = {
val driverClass: Class[_ <: Driver] = Option(
DynClasses.builder().impl(kyuubiConf.get(ENGINE_JDBC_DRIVER_CLASS).get)
.orNull().build[Driver]()).getOrElse {
@@ -38,7 +38,7 @@ abstract class AbstractConnectionProvider extends Logging {
}
def create(kyuubiConf: KyuubiConf): Connection = {
- val filteredProviders = providers.filter(_.canHandle(getProviderClass(kyuubiConf)))
+ val filteredProviders = providers.filter(_.canHandle(getDriverClass(kyuubiConf)))
if (filteredProviders.isEmpty) {
throw new IllegalArgumentException(
"Empty list of JDBC connection providers for the specified driver and options")
@@ -57,10 +57,9 @@ abstract class AbstractConnectionProvider extends Logging {
case None =>
// TODO
if (filteredProviders.size != 1) {
- throw new IllegalArgumentException(
- "JDBC connection initiated but more than one connection provider was found. Use " +
- s"${ENGINE_JDBC_CONNECTION_PROVIDER.key} option to select a specific provider. " +
- s"Found active providers ${filteredProviders.mkString("[", ", ", "]")}")
+ warn("JDBC connection initiated but more than one connection provider was found. Use " +
+ s"${ENGINE_JDBC_CONNECTION_PROVIDER.key} option to select a specific provider. " +
+ s"Found active providers ${filteredProviders.mkString("[", ", ", "]")}")
}
filteredProviders.head
}
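
For context, a minimal sketch (not part of this patch) of how a caller can avoid the
"more than one connection provider" warning path by pinning the provider explicitly;
the connection URL below is a placeholder:

    import org.apache.kyuubi.config.KyuubiConf
    import org.apache.kyuubi.config.KyuubiConf._
    import org.apache.kyuubi.engine.jdbc.connection.ConnectionProvider

    val conf = KyuubiConf()
      .set(ENGINE_JDBC_CONNECTION_URL.key, "jdbc:mysql://localhost:3306/db1") // placeholder URL
      .set(ENGINE_JDBC_DRIVER_CLASS.key, "com.mysql.cj.jdbc.Driver")
      .set(ENGINE_JDBC_CONNECTION_PROVIDER.key, "mysql") // resolves to MySQLConnectionProvider
    val connection = ConnectionProvider.create(conf)
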
diff --git a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/dialect/MySQLDialect.scala b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/dialect/MySQLDialect.scala
new file mode 100644
index 00000000000..1cafcd9a9a9
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/dialect/MySQLDialect.scala
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.dialect
+import java.sql.{Connection, ResultSet, Statement}
+import java.util
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ArrayBuffer
+
+import org.apache.commons.lang3.StringUtils
+
+import org.apache.kyuubi.engine.jdbc.mysql.{MySQLRowSetHelper, MySQLSchemaHelper}
+import org.apache.kyuubi.engine.jdbc.schema.{RowSetHelper, SchemaHelper}
+import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+import org.apache.kyuubi.session.Session
+
+class MySQLDialect extends JdbcDialect {
+ override def createStatement(connection: Connection, fetchSize: Int): Statement = {
+ val statement =
+ connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)
+ statement.setFetchSize(Integer.MIN_VALUE)
+ statement
+ }
+
+ override def getTablesQuery(
+ catalog: String,
+ schema: String,
+ tableName: String,
+ tableTypes: util.List[String]): String = {
+ val tTypes =
+ if (tableTypes == null || tableTypes.isEmpty) {
+ Set("BASE TABLE", "SYSTEM VIEW", "VIEW")
+ } else {
+ tableTypes.asScala.toSet
+ }
+ val query = new StringBuilder(
+ s"""
+ |SELECT TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE, ENGINE,
+ |TABLE_ROWS, AVG_ROW_LENGTH, DATA_LENGTH,
+ |CREATE_TIME, UPDATE_TIME, TABLE_COLLATION, TABLE_COMMENT
+ |FROM INFORMATION_SCHEMA.TABLES
+ |""".stripMargin)
+
+ val filters = ArrayBuffer[String]()
+ if (StringUtils.isNotBlank(catalog)) {
+ filters += s"$TABLE_CATALOG = '$catalog'"
+ }
+
+ if (StringUtils.isNotBlank(schema)) {
+ filters += s"$TABLE_SCHEMA LIKE '$schema'"
+ }
+
+ if (StringUtils.isNotBlank(tableName)) {
+ filters += s"$TABLE_NAME LIKE '$tableName'"
+ }
+
+ if (tTypes.nonEmpty) {
+ filters += s"(${
+ tTypes.map { tableType => s"$TABLE_TYPE = '$tableType'" }
+ .mkString(" OR ")
+ })"
+ }
+
+ if (filters.nonEmpty) {
+ query.append(" WHERE ")
+ query.append(filters.mkString(" AND "))
+ }
+
+ query.toString()
+ }
+
+ override def getColumnsQuery(
+ session: Session,
+ catalogName: String,
+ schemaName: String,
+ tableName: String,
+ columnName: String): String = {
+ val query = new StringBuilder(
+ """
+ |SELECT
+ |`TABLE_CATALOG`,`TABLE_SCHEMA`,`TABLE_NAME`, `COLUMN_NAME`,`ORDINAL_POSITION`,
+ |`COLUMN_DEFAULT`,`IS_NULLABLE`,`DATA_TYPE`,`CHARACTER_MAXIMUM_LENGTH`,
+ |`CHARACTER_OCTET_LENGTH`,`NUMERIC_PRECISION`,`NUMERIC_SCALE`,`DATETIME_PRECISION`,
+ |`CHARACTER_SET_NAME`,`COLLATION_NAME`,`COLUMN_TYPE`,`COLUMN_KEY`,`EXTRA`,`PRIVILEGES`,
+ |`COLUMN_COMMENT`,`GENERATION_EXPRESSION`
+ |FROM information_schema.columns
+ |""".stripMargin)
+
+ val filters = ArrayBuffer[String]()
+ if (StringUtils.isNotEmpty(catalogName)) {
+ filters += s"$TABLE_CATALOG = '$catalogName'"
+ }
+ if (StringUtils.isNotEmpty(schemaName)) {
+ filters += s"$TABLE_SCHEMA LIKE '$schemaName'"
+ }
+ if (StringUtils.isNotEmpty(tableName)) {
+ filters += s"$TABLE_NAME LIKE '$tableName'"
+ }
+ if (StringUtils.isNotEmpty(columnName)) {
+ filters += s"$COLUMN_NAME LIKE '$columnName'"
+ }
+
+ if (filters.nonEmpty) {
+ query.append(" WHERE ")
+ query.append(filters.mkString(" AND "))
+ }
+
+ query.toString()
+ }
+
+ override def getRowSetHelper(): RowSetHelper = {
+ new MySQLRowSetHelper
+ }
+
+ override def getSchemaHelper(): SchemaHelper = {
+ new MySQLSchemaHelper
+ }
+
+ override def name(): String = {
+ "mysql"
+ }
+}
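
An illustrative sketch (not part of this patch) of the statement the dialect is expected
to assemble; the arguments are placeholders and the whitespace is reformatted here:

    val dialect = new MySQLDialect()
    val sql = dialect.getTablesQuery("def", "db%", "test%", java.util.Collections.emptyList[String]())
    // sql roughly reads:
    //   SELECT TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE, ENGINE,
    //   TABLE_ROWS, AVG_ROW_LENGTH, DATA_LENGTH,
    //   CREATE_TIME, UPDATE_TIME, TABLE_COLLATION, TABLE_COMMENT
    //   FROM INFORMATION_SCHEMA.TABLES
    //   WHERE TABLE_CATALOG = 'def' AND TABLE_SCHEMA LIKE 'db%' AND TABLE_NAME LIKE 'test%'
    //   AND (TABLE_TYPE = 'BASE TABLE' OR TABLE_TYPE = 'SYSTEM VIEW' OR TABLE_TYPE = 'VIEW')
    // with the OR terms in whatever order the default table-type set iterates.
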
diff --git a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLConnectionProvider.scala b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLConnectionProvider.scala
new file mode 100644
index 00000000000..249ea0c31f6
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLConnectionProvider.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+class MySQLConnectionProvider extends Mysql8ConnectionProvider {
+
+ override val name: String = classOf[MySQLConnectionProvider].getSimpleName
+}
diff --git a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLRowSetHelper.scala b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLRowSetHelper.scala
new file mode 100644
index 00000000000..1c85cb009dd
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLRowSetHelper.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+import java.sql.Types
+
+import org.apache.hive.service.rpc.thrift.{TColumn, TColumnValue}
+
+import org.apache.kyuubi.engine.jdbc.schema.RowSetHelper
+
+class MySQLRowSetHelper extends RowSetHelper {
+
+ override def toTinyIntTColumn(rows: Seq[Seq[Any]], ordinal: Int): TColumn =
+ toIntegerTColumn(rows, ordinal)
+
+ override def toSmallIntTColumn(rows: Seq[Seq[Any]], ordinal: Int): TColumn =
+ toIntegerTColumn(rows, ordinal)
+
+ override def toTinyIntTColumnValue(row: List[Any], ordinal: Int): TColumnValue =
+ toIntegerTColumnValue(row, ordinal)
+
+ override def toSmallIntTColumnValue(row: List[Any], ordinal: Int): TColumnValue =
+ toIntegerTColumnValue(row, ordinal)
+
+ override protected def toIntegerTColumn(rows: Seq[Seq[Any]], ordinal: Int): TColumn = {
+ val colHead = if (rows.isEmpty) None else rows.head(ordinal)
+ colHead match {
+ case v: Integer => super.toIntegerTColumn(rows, ordinal)
+ case v: java.lang.Long => super.toBigIntTColumn(rows, ordinal)
+ case _ => super.toDefaultTColumn(rows, ordinal, Types.INTEGER)
+ }
+ }
+
+ override protected def toIntegerTColumnValue(row: List[Any], ordinal: Int): TColumnValue = {
+ row(ordinal) match {
+ case v: Integer => super.toIntegerTColumnValue(row, ordinal)
+ case v: java.lang.Long => super.toBigIntTColumnValue(row, ordinal)
+ case _ => super.toDefaultTColumnValue(row, ordinal, Types.INTEGER)
+ }
+ }
+
+ override protected def toBigIntTColumn(rows: Seq[Seq[Any]], ordinal: Int): TColumn = {
+ val colHead = if (rows.isEmpty) None else rows.head(ordinal)
+ colHead match {
+ case v: java.lang.Long => super.toBigIntTColumn(rows, ordinal)
+ case _ => super.toDefaultTColumn(rows, ordinal, Types.BIGINT)
+ }
+ }
+
+ override protected def toBigIntTColumnValue(row: List[Any], ordinal: Int): TColumnValue =
+ row(ordinal) match {
+ case v: java.lang.Long => super.toBigIntTColumnValue(row, ordinal)
+ case _ => super.toDefaultTColumnValue(row, ordinal, Types.BIGINT)
+ }
+}
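
A standalone sketch of the same idea (placeholder code, not calling Kyuubi classes):
MySQL Connector/J may surface integral columns as Integer, Long, or even
java.math.BigInteger for BIGINT UNSIGNED, so the helper dispatches on the runtime
value rather than trusting the declared SQL type alone:

    def integralKind(value: Any): String = value match {
      case _: java.lang.Integer    => "i32"    // fits the Thrift integer column
      case _: java.lang.Long       => "i64"    // widened to the Thrift bigint column
      case _                       => "string" // e.g. BigInteger falls back to the default column
    }
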
diff --git a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLSchemaHelper.scala b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLSchemaHelper.scala
new file mode 100644
index 00000000000..b7351b26b3e
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLSchemaHelper.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+import org.apache.kyuubi.engine.jdbc.schema.SchemaHelper
+
+class MySQLSchemaHelper extends SchemaHelper {}
diff --git a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/schema/RowSetHelper.scala b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/schema/RowSetHelper.scala
index 74b4cec108d..714b3bb7e76 100644
--- a/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/schema/RowSetHelper.scala
+++ b/externals/kyuubi-jdbc-engine/src/main/scala/org/apache/kyuubi/engine/jdbc/schema/RowSetHelper.scala
@@ -49,7 +49,7 @@ abstract class RowSetHelper {
val columnSize = row.size
var j = 0
while (j < columnSize) {
- val columnValue = toTColumnValue(j, row, columns)
+ val columnValue = toTColumnValue(j, row, columns(i).sqlType)
tRow.addToColVals(columnValue)
j += 1
}
@@ -110,8 +110,8 @@ abstract class RowSetHelper {
}
}
- protected def toTColumnValue(ordinal: Int, row: List[Any], types: List[Column]): TColumnValue = {
- types(ordinal).sqlType match {
+ protected def toTColumnValue(ordinal: Int, row: List[Any], sqlType: Int): TColumnValue = {
+ sqlType match {
case Types.BIT =>
toBitTColumnValue(row, ordinal)
@@ -140,7 +140,7 @@ abstract class RowSetHelper {
toVarcharTColumnValue(row, ordinal)
case _ =>
- toDefaultTColumnValue(row, ordinal, types)
+ toDefaultTColumnValue(row, ordinal, sqlType)
}
}
@@ -299,11 +299,11 @@ abstract class RowSetHelper {
protected def toDefaultTColumnValue(
row: List[Any],
ordinal: Int,
- types: List[Column]): TColumnValue = {
+ sqlType: Int): TColumnValue = {
val tStrValue = new TStringValue
if (row(ordinal) != null) {
tStrValue.setValue(
- toHiveString(row(ordinal), types(ordinal).sqlType))
+ toHiveString(row(ordinal), sqlType))
}
TColumnValue.stringVal(tStrValue)
}
@@ -316,6 +316,8 @@ abstract class RowSetHelper {
formatLocalDateTime(dateTime)
case (decimal: java.math.BigDecimal, Types.DECIMAL) =>
decimal.toPlainString
+ case (bigint: java.math.BigInteger, Types.BIGINT) =>
+ bigint.toString()
case (other, _) =>
other.toString
}
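
A small sketch of the value shape behind the new BigInteger case (placeholder value):
a MySQL BIGINT UNSIGNED can exceed Long.MaxValue, so the driver hands it back as
java.math.BigInteger and the new branch renders it as its plain decimal string:

    val unsignedMax = new java.math.BigInteger("18446744073709551615")
    val rendered = unsignedMax.toString // "18446744073709551615"
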
diff --git a/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLOperationSuite.scala b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLOperationSuite.scala
new file mode 100644
index 00000000000..ffd7c0a0fe8
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/MySQLOperationSuite.scala
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+import java.sql.ResultSet
+
+import scala.collection.mutable.ArrayBuffer
+
+import org.apache.kyuubi.operation.HiveJDBCTestHelper
+import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+
+abstract class MySQLOperationSuite extends WithMySQLEngine with HiveJDBCTestHelper {
+ test("mysql - get tables") {
+ case class Table(catalog: String, schema: String, tableName: String, tableType: String)
+
+ withJdbcStatement() { statement =>
+ val meta = statement.getConnection.getMetaData
+ val resultBuffer = ArrayBuffer[Table]()
+
+ var tables = meta.getTables(null, null, null, null)
+ while (tables.next()) {
+ resultBuffer +=
+ Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ }
+ assert(resultBuffer.contains(Table("def", "information_schema", "TABLES", "SYSTEM VIEW")))
+ assert(resultBuffer.contains(Table("def", "information_schema", "VIEWS", "SYSTEM VIEW")))
+ resultBuffer.clear()
+
+ statement.execute("create database if not exists db1")
+ statement.execute("create table db1.test1(id bigint)" +
+ "ENGINE=InnoDB DEFAULT CHARSET=utf8;")
+ statement.execute("create table db1.test2(id bigint)" +
+ "ENGINE=InnoDB DEFAULT CHARSET=utf8;")
+
+ statement.execute("create database if not exists db2")
+ statement.execute("create table db2.test1(id bigint)" +
+ "ENGINE=InnoDB DEFAULT CHARSET=utf8;")
+ statement.execute("create table db2.test2(id bigint)" +
+ "ENGINE=InnoDB DEFAULT CHARSET=utf8;")
+
+ statement.execute("create view db1.view1 (k1) as select id from db1.test1")
+
+ tables = meta.getTables(null, "db1", "test1", Array("BASE TABLE"))
+ while (tables.next()) {
+ val table = Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ assert(table == Table("def", "db1", "test1", "BASE TABLE"))
+ }
+
+ tables = meta.getTables("def", "db1", null, null)
+ while (tables.next()) {
+ resultBuffer += Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ }
+ assert(resultBuffer.contains(Table("def", "db1", "test2", "BASE TABLE")))
+ resultBuffer.clear()
+
+ tables = meta.getTables(null, null, "test1", null)
+ while (tables.next()) {
+ resultBuffer += Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ }
+ assert(resultBuffer.contains(Table("def", "db1", "test1", "BASE TABLE")))
+ assert(resultBuffer.contains(Table("def", "db2", "test1", "BASE TABLE")))
+ resultBuffer.clear()
+
+ tables = meta.getTables(null, "db%", "test1", null)
+ while (tables.next()) {
+ resultBuffer += Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ }
+ assert(resultBuffer.contains(Table("def", "db1", "test1", "BASE TABLE")))
+ assert(resultBuffer.contains(Table("def", "db2", "test1", "BASE TABLE")))
+ resultBuffer.clear()
+
+ tables = meta.getTables(null, "db2", "test%", null)
+ while (tables.next()) {
+ resultBuffer += Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ }
+ assert(resultBuffer.contains(Table("def", "db2", "test1", "BASE TABLE")))
+ assert(resultBuffer.contains(Table("def", "db2", "test2", "BASE TABLE")))
+ resultBuffer.clear()
+
+ tables = meta.getTables(null, "fake_db", "test1", null)
+ assert(!tables.next())
+
+ tables = meta.getTables(null, "db1", null, Array("VIEW"))
+ while (tables.next()) {
+ val table = Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ assert(table == Table("def", "db1", "view1", "VIEW"))
+ }
+
+ tables = meta.getTables(null, null, null, Array("VIEW", "BASE TABLE"))
+ while (tables.next()) {
+ resultBuffer += Table(
+ tables.getString(TABLE_CATALOG),
+ tables.getString(TABLE_SCHEMA),
+ tables.getString(TABLE_NAME),
+ tables.getString(TABLE_TYPE))
+ }
+ assert(resultBuffer.contains(Table("def", "db1", "test1", "BASE TABLE")))
+ assert(resultBuffer.contains(Table("def", "db1", "test2", "BASE TABLE")))
+ assert(resultBuffer.contains(Table("def", "db2", "test1", "BASE TABLE")))
+ assert(resultBuffer.contains(Table("def", "db2", "test2", "BASE TABLE")))
+ assert(resultBuffer.contains(Table("def", "db1", "view1", "VIEW")))
+ resultBuffer.clear()
+
+ statement.execute("drop view db1.view1")
+ statement.execute("drop table db1.test1")
+ statement.execute("drop table db1.test2")
+ statement.execute("drop table db2.test1")
+ statement.execute("drop table db2.test2")
+ statement.execute("drop database db1")
+ statement.execute("drop database db2")
+ }
+ }
+
+ test("mysql - get columns") {
+ case class Column(tableSchema: String, tableName: String, columnName: String)
+
+ def buildColumn(resultSet: ResultSet): Column = {
+ val schema = resultSet.getString(TABLE_SCHEMA)
+ val tableName = resultSet.getString(TABLE_NAME)
+ val columnName = resultSet.getString(COLUMN_NAME)
+ val column = Column(schema, tableName, columnName)
+ column
+ }
+
+ withJdbcStatement() { statement =>
+ val metadata = statement.getConnection.getMetaData
+ statement.execute("create database if not exists db1")
+ statement.execute("create table if not exists db1.test1" +
+ "(id bigint, str1 varchar(255), str2 varchar(255), age int)" +
+ "ENGINE=InnoDB DEFAULT CHARSET=utf8;")
+ statement.execute("create table if not exists db1.test2" +
+ "(id bigint, str1 varchar(255), str2 varchar(255), age int)" +
+ "ENGINE=InnoDB DEFAULT CHARSET=utf8;")
+
+ statement.execute("create database if not exists db2")
+
+ statement.execute("create table if not exists db2.test1" +
+ "(id bigint, str1 varchar(255), str2 varchar(255), age int)" +
+ "ENGINE=InnoDB DEFAULT CHARSET=utf8;")
+
+ val resultBuffer = ArrayBuffer[Column]()
+ val resultSet1 = metadata.getColumns(null, "db1", null, null)
+ while (resultSet1.next()) {
+ val column = buildColumn(resultSet1)
+ resultBuffer += column
+ }
+
+ assert(resultBuffer.contains(Column("db1", "test1", "id")))
+ assert(resultBuffer.contains(Column("db1", "test1", "str1")))
+ assert(resultBuffer.contains(Column("db1", "test1", "str2")))
+ assert(resultBuffer.contains(Column("db1", "test1", "age")))
+
+ assert(resultBuffer.contains(Column("db1", "test2", "id")))
+ assert(resultBuffer.contains(Column("db1", "test2", "str1")))
+ assert(resultBuffer.contains(Column("db1", "test2", "str2")))
+ assert(resultBuffer.contains(Column("db1", "test2", "age")))
+
+ resultBuffer.clear()
+
+ val resultSet2 = metadata.getColumns(null, null, "test1", null)
+ while (resultSet2.next()) {
+ val column = buildColumn(resultSet2)
+ resultBuffer += column
+ }
+
+ assert(resultBuffer.contains(Column("db1", "test1", "id")))
+ assert(resultBuffer.contains(Column("db1", "test1", "str1")))
+ assert(resultBuffer.contains(Column("db1", "test1", "str2")))
+ assert(resultBuffer.contains(Column("db1", "test1", "age")))
+
+ assert(resultBuffer.contains(Column("db2", "test1", "id")))
+ assert(resultBuffer.contains(Column("db2", "test1", "str1")))
+ assert(resultBuffer.contains(Column("db2", "test1", "str2")))
+ assert(resultBuffer.contains(Column("db2", "test1", "age")))
+
+ resultBuffer.clear()
+
+ val resultSet3 = metadata.getColumns(null, null, null, "age")
+ while (resultSet3.next()) {
+ val column = buildColumn(resultSet3)
+ resultBuffer += column
+ }
+
+ assert(resultBuffer.contains(Column("db1", "test1", "age")))
+ assert(resultBuffer.contains(Column("db1", "test2", "age")))
+ assert(resultBuffer.contains(Column("db2", "test1", "age")))
+
+ resultBuffer.clear()
+
+ val resultSet4 = metadata.getColumns(null, "d%1", "t%1", "str%")
+ while (resultSet4.next()) {
+ val column = buildColumn(resultSet4)
+ resultBuffer += column
+ }
+
+ assert(resultBuffer.contains(Column("db1", "test1", "str1")))
+ assert(resultBuffer.contains(Column("db1", "test1", "str2")))
+
+ resultBuffer.clear()
+
+ val resultSet5 = metadata.getColumns(null, "d%1", "t%1", "fake")
+ assert(!resultSet5.next())
+
+ statement.execute("drop table db1.test1")
+ statement.execute("drop table db1.test2")
+ statement.execute("drop database db1")
+ statement.execute("drop table db2.test1")
+ statement.execute("drop database db2")
+ }
+ }
+}
diff --git a/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/OperationWithEngineSuite.scala b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/OperationWithEngineSuite.scala
new file mode 100644
index 00000000000..4cf76427d60
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/OperationWithEngineSuite.scala
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+import org.apache.hive.service.rpc.thrift._
+
+import org.apache.kyuubi.config.KyuubiConf
+import org.apache.kyuubi.engine.jdbc.connection.ConnectionProvider
+import org.apache.kyuubi.operation.HiveJDBCTestHelper
+
+class OperationWithEngineSuite extends MySQLOperationSuite with HiveJDBCTestHelper {
+
+ override protected def jdbcUrl: String = jdbcConnectionUrl
+
+ test("Test for Jdbc engine getInfo") {
+ val metaData = ConnectionProvider.create(kyuubiConf).getMetaData
+
+ withSessionConf(Map(KyuubiConf.SERVER_INFO_PROVIDER.key -> "ENGINE"))()() {
+ withSessionHandle { (client, handle) =>
+ val req = new TGetInfoReq()
+ req.setSessionHandle(handle)
+ req.setInfoType(TGetInfoType.CLI_DBMS_NAME)
+ assert(client.GetInfo(req).getInfoValue.getStringValue == metaData.getDatabaseProductName)
+
+ val req2 = new TGetInfoReq()
+ req2.setSessionHandle(handle)
+ req2.setInfoType(TGetInfoType.CLI_DBMS_VER)
+ assert(
+ client.GetInfo(req2).getInfoValue.getStringValue == metaData.getDatabaseProductVersion)
+
+ val req3 = new TGetInfoReq()
+ req3.setSessionHandle(handle)
+ req3.setInfoType(TGetInfoType.CLI_MAX_COLUMN_NAME_LEN)
+ assert(client.GetInfo(req3).getInfoValue.getLenValue == metaData.getMaxColumnNameLength)
+
+ val req4 = new TGetInfoReq()
+ req4.setSessionHandle(handle)
+ req4.setInfoType(TGetInfoType.CLI_MAX_SCHEMA_NAME_LEN)
+ assert(client.GetInfo(req4).getInfoValue.getLenValue == metaData.getMaxSchemaNameLength)
+
+ val req5 = new TGetInfoReq()
+ req5.setSessionHandle(handle)
+ req5.setInfoType(TGetInfoType.CLI_MAX_TABLE_NAME_LEN)
+ assert(client.GetInfo(req5).getInfoValue.getLenValue == metaData.getMaxTableNameLength)
+ }
+ }
+ }
+
+ test("JDBC ExecuteStatement operation should contain operationLog") {
+ withSessionHandle { (client, handle) =>
+ val tExecuteStatementReq = new TExecuteStatementReq()
+ tExecuteStatementReq.setSessionHandle(handle)
+ tExecuteStatementReq.setStatement("SELECT 1")
+ val tExecuteStatementResp = client.ExecuteStatement(tExecuteStatementReq)
+
+ val tFetchResultsReq = new TFetchResultsReq()
+ tFetchResultsReq.setOperationHandle(tExecuteStatementResp.getOperationHandle)
+ tFetchResultsReq.setFetchType(1)
+ tFetchResultsReq.setMaxRows(1)
+
+ val tFetchResultsResp = client.FetchResults(tFetchResultsReq)
+ assert(tFetchResultsResp.getStatus.getStatusCode === TStatusCode.SUCCESS_STATUS)
+ }
+ }
+}
diff --git a/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/SessionSuite.scala b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/SessionSuite.scala
new file mode 100644
index 00000000000..65107603d77
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/SessionSuite.scala
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+import org.apache.kyuubi.operation.HiveJDBCTestHelper
+
+class SessionSuite extends WithMySQLEngine with HiveJDBCTestHelper {
+
+ test("test session") {
+ withJdbcStatement() { statement =>
+ val resultSet = statement.executeQuery(
+ "select '1' as id")
+ val metadata = resultSet.getMetaData
+ for (i <- 1 to metadata.getColumnCount) {
+ assert(metadata.getColumnName(i) == "id")
+ }
+ while (resultSet.next()) {
+ val id = resultSet.getObject(1)
+ assert(id == "1")
+ }
+ }
+ }
+
+ override protected def jdbcUrl: String = jdbcConnectionUrl
+}
diff --git a/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/StatementSuite.scala b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/StatementSuite.scala
new file mode 100644
index 00000000000..56ae737fc80
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/StatementSuite.scala
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+import java.sql.{Date, Timestamp}
+
+import org.apache.kyuubi.operation.HiveJDBCTestHelper
+
+class StatementSuite extends WithMySQLEngine with HiveJDBCTestHelper {
+
+ test("test select") {
+ withJdbcStatement("test1") { statement =>
+ statement.execute("create database if not exists db1")
+ statement.execute("use db1")
+ statement.execute("create table db1.test1(id bigint, name varchar(255), age int, " +
+ "PRIMARY KEY ( `id` ))" +
+ "ENGINE=InnoDB " +
+ "DEFAULT CHARSET=utf8;")
+ statement.execute("insert into db1.test1 values(1, 'a', 11)")
+
+ val resultSet1 = statement.executeQuery("select * from db1.test1")
+ while (resultSet1.next()) {
+ val id = resultSet1.getObject(1)
+ assert(id == 1)
+ val name = resultSet1.getObject(2)
+ assert(name == "a")
+ val age = resultSet1.getObject(3)
+ assert(age == 11)
+ }
+ }
+ }
+
+ test("test types") {
+ withJdbcStatement("test1") { statement =>
+ statement.execute("create database if not exists db1")
+ statement.execute("use db1")
+ statement.execute("create table db1.type_test(" +
+ "id bigint, " +
+ "tiny_col tinyint, smallint_col smallint, " +
+ "int_col int, bigint_col bigint, " +
+ "decimal_col decimal(27, 9)," +
+ "date_col date, datetime_col datetime, timestamp_col timestamp," +
+ "char_col char, varchar_col varchar(255), " +
+ "boolean_col boolean, " +
+ "double_col double, float_col float," +
+ "PRIMARY KEY ( `id` )) " +
+ "ENGINE=InnoDB " +
+ "DEFAULT CHARSET=utf8")
+ statement.execute("insert into db1.type_test" +
+ "(id, " +
+ "tiny_col, smallint_col, int_col, bigint_col, " +
+ "decimal_col, " +
+ "date_col, datetime_col, timestamp_col," +
+ "char_col, varchar_col, " +
+ "boolean_col, " +
+ "double_col, float_col) " +
+ "VALUES (1, 2, 3, 4, 5, 6.6, '2023-10-23', '2023-10-23 15:31:45', " +
+ "'2023-10-23 15:31:45', 'a', 'Hello', true, 7.7, 8.8)")
+
+ val resultSet1 = statement.executeQuery("select * from db1.type_test")
+ while (resultSet1.next()) {
+ assert(resultSet1.getObject(1) == 1)
+ assert(resultSet1.getObject(2) == 2)
+ assert(resultSet1.getObject(3) == 3)
+ assert(resultSet1.getObject(4) == 4)
+ assert(resultSet1.getObject(5) == 5)
+ assert(resultSet1.getObject(6) == new java.math.BigDecimal("6.600000000"))
+ assert(resultSet1.getObject(7) == Date.valueOf("2023-10-23"))
+ assert(resultSet1.getObject(8) == Timestamp.valueOf("2023-10-23 15:31:45"))
+ assert(resultSet1.getObject(9) == Timestamp.valueOf("2023-10-23 15:31:45"))
+ assert(resultSet1.getObject(10) == "a")
+ assert(resultSet1.getObject(11) == "Hello")
+ assert(resultSet1.getObject(12) == true)
+ assert(resultSet1.getObject(13) == 7.7)
+ assert(resultSet1.getObject(14) == 8.8)
+ }
+ }
+ }
+
+ override protected def jdbcUrl: String = jdbcConnectionUrl
+}
diff --git a/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/WithMySQLEngine.scala b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/WithMySQLEngine.scala
new file mode 100644
index 00000000000..bd79e0a27b6
--- /dev/null
+++ b/externals/kyuubi-jdbc-engine/src/test/scala/org/apache/kyuubi/engine/jdbc/mysql/WithMySQLEngine.scala
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kyuubi.engine.jdbc.mysql
+
+import com.dimafeng.testcontainers.MySQLContainer
+import com.dimafeng.testcontainers.scalatest.TestContainerForAll
+import org.testcontainers.utility.DockerImageName
+
+import org.apache.kyuubi.config.KyuubiConf._
+import org.apache.kyuubi.engine.jdbc.WithJdbcEngine
+
+trait WithMySQLEngine extends WithJdbcEngine with TestContainerForAll {
+
+ private val mysqlDockerImage = "mysql:8.0.32"
+
+ override val containerDef = MySQLContainer.Def(
+ dockerImageName = DockerImageName.parse(mysqlDockerImage),
+ username = "root",
+ password = "kyuubi")
+
+ override def withKyuubiConf: Map[String, String] = withContainers { mysqlContainer =>
+ Map(
+ ENGINE_SHARE_LEVEL.key -> "SERVER",
+ ENGINE_JDBC_CONNECTION_URL.key -> mysqlContainer.jdbcUrl,
+ ENGINE_JDBC_CONNECTION_USER.key -> "root",
+ ENGINE_JDBC_CONNECTION_PASSWORD.key -> "kyuubi",
+ ENGINE_TYPE.key -> "jdbc",
+ ENGINE_JDBC_SHORT_NAME.key -> "mysql",
+ ENGINE_JDBC_DRIVER_CLASS.key -> "com.mysql.cj.jdbc.Driver")
+ }
+}
diff --git a/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala b/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala
index 55476bfd003..b628654bbdd 100644
--- a/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala
+++ b/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala
@@ -98,6 +98,8 @@ class FlinkOperationSuite extends WithKyuubiServerAndFlinkMiniCluster
req.setSessionHandle(handle)
req.setInfoType(TGetInfoType.CLI_DBMS_NAME)
assert(client.GetInfo(req).getInfoValue.getStringValue === "Apache Flink")
+ req.setInfoType(TGetInfoType.CLI_ODBC_KEYWORDS)
+ assert(client.GetInfo(req).getInfoValue.getStringValue === "Unimplemented")
}
}
}
diff --git a/integration-tests/kyuubi-jdbc-it/pom.xml b/integration-tests/kyuubi-jdbc-it/pom.xml
index 820429cc4a0..8734c853bc9 100644
--- a/integration-tests/kyuubi-jdbc-it/pom.xml
+++ b/integration-tests/kyuubi-jdbc-it/pom.xml
@@ -78,6 +78,18 @@
<artifactId>testcontainers-scala-scalatest_${scala.binary.version}</artifactId>
<scope>test</scope>
+
+ <dependency>
+ <groupId>com.dimafeng</groupId>
+ <artifactId>testcontainers-scala-mysql_${scala.binary.version}</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.mysql</groupId>
+ <artifactId>mysql-connector-j</artifactId>
+ <scope>test</scope>
+ </dependency>
diff --git a/integration-tests/kyuubi-jdbc-it/src/test/scala/org/apache/kyuubi/it/jdbc/mysql/OperationWithServerSuite.scala b/integration-tests/kyuubi-jdbc-it/src/test/scala/org/apache/kyuubi/it/jdbc/mysql/OperationWithServerSuite.scala
new file mode 100644
index 00000000000..263de3d1528
--- /dev/null
+++ b/integration-tests/kyuubi-jdbc-it/src/test/scala/org/apache/kyuubi/it/jdbc/mysql/OperationWithServerSuite.scala
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.it.jdbc.mysql
+
+import org.apache.kyuubi.engine.jdbc.mysql.MySQLOperationSuite
+
+class OperationWithServerSuite extends MySQLOperationSuite
+ with WithKyuubiServerAndMySQLContainer {
+
+ override protected def jdbcUrl: String = getJdbcUrl
+
+}
diff --git a/integration-tests/kyuubi-jdbc-it/src/test/scala/org/apache/kyuubi/it/jdbc/mysql/WithKyuubiServerAndMySQLContainer.scala b/integration-tests/kyuubi-jdbc-it/src/test/scala/org/apache/kyuubi/it/jdbc/mysql/WithKyuubiServerAndMySQLContainer.scala
new file mode 100644
index 00000000000..da94df8e799
--- /dev/null
+++ b/integration-tests/kyuubi-jdbc-it/src/test/scala/org/apache/kyuubi/it/jdbc/mysql/WithKyuubiServerAndMySQLContainer.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.it.jdbc.mysql
+
+import java.nio.file.{Files, Path, Paths}
+
+import org.apache.kyuubi.{Utils, WithKyuubiServer}
+import org.apache.kyuubi.config.KyuubiConf
+import org.apache.kyuubi.config.KyuubiConf.{ENGINE_JDBC_EXTRA_CLASSPATH, KYUUBI_ENGINE_ENV_PREFIX, KYUUBI_HOME}
+import org.apache.kyuubi.engine.jdbc.mysql.WithMySQLEngine
+
+trait WithKyuubiServerAndMySQLContainer extends WithKyuubiServer with WithMySQLEngine {
+
+ private val kyuubiHome: String = Utils
+ .getCodeSourceLocation(getClass).split("integration-tests").head
+
+ private val mysqlJdbcConnectorPath: String = {
+ val keyword = "mysql-connector"
+
+ val jarsDir = Paths.get(kyuubiHome)
+ .resolve("integration-tests")
+ .resolve("kyuubi-jdbc-it")
+ .resolve("target")
+
+ Files.list(jarsDir)
+ .filter { p: Path => p.getFileName.toString contains keyword }
+ .findFirst
+ .orElseThrow { () => new IllegalStateException(s"Can not find $keyword in $jarsDir.") }
+ .toAbsolutePath
+ .toString
+ }
+
+ override protected val conf: KyuubiConf = {
+ KyuubiConf()
+ .set(s"$KYUUBI_ENGINE_ENV_PREFIX.$KYUUBI_HOME", kyuubiHome)
+ .set(ENGINE_JDBC_EXTRA_CLASSPATH, mysqlJdbcConnectorPath)
+ }
+
+ override def beforeAll(): Unit = {
+ val configs = withKyuubiConf
+ configs.foreach(config => conf.set(config._1, config._2))
+ super.beforeAll()
+ }
+}
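
Outside of tests, the driver jar can be handed to the JDBC engine the same way; a
minimal sketch with a placeholder path:

    import org.apache.kyuubi.config.KyuubiConf
    import org.apache.kyuubi.config.KyuubiConf.ENGINE_JDBC_EXTRA_CLASSPATH

    val conf = KyuubiConf()
      .set(ENGINE_JDBC_EXTRA_CLASSPATH, "/opt/jdbc-drivers/mysql-connector-j.jar") // placeholder path
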
diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala
index accfca4c98f..0144dadbb86 100644
--- a/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala
+++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala
@@ -340,7 +340,7 @@ object Utils extends Logging {
private val PATTERN_FOR_KEY_VALUE_ARG = "(.+?)=(.+)".r
- def redactCommandLineArgs(conf: KyuubiConf, commands: Array[String]): Array[String] = {
+ def redactCommandLineArgs(conf: KyuubiConf, commands: Iterable[String]): Iterable[String] = {
val redactionPattern = conf.get(SERVER_SECRET_REDACTION_PATTERN)
var nextKV = false
commands.map {
diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala
index 2bc27b20c5f..323fd222c86 100644
--- a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala
+++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/KyuubiConf.scala
@@ -2829,9 +2829,28 @@ object KyuubiConf {
val ENGINE_JDBC_CONNECTION_PROVIDER: OptionalConfigEntry[String] =
buildConf("kyuubi.engine.jdbc.connection.provider")
- .doc("The connection provider is used for getting a connection from the server")
+ .doc("A JDBC connection provider plugin for the Kyuubi Server " +
+ "to establish a connection to the JDBC URL." +
+ " The configuration value should be a subclass of " +
+ "`org.apache.kyuubi.engine.jdbc.connection.JdbcConnectionProvider`. " +
+ "Kyuubi provides the following built-in implementations: " +
+ "doris: For establishing Doris connections. " +
+ "mysql: For establishing MySQL connections. " +
+ "phoenix: For establishing Phoenix connections. " +
+ "postgresql: For establishing PostgreSQL connections.")
.version("1.6.0")
.stringConf
+ .transform {
+ case "Doris" | "doris" | "DorisConnectionProvider" =>
+ "org.apache.kyuubi.engine.jdbc.doris.DorisConnectionProvider"
+ case "MySQL" | "mysql" | "MySQLConnectionProvider" =>
+ "org.apache.kyuubi.engine.jdbc.mysql.MySQLConnectionProvider"
+ case "Phoenix" | "phoenix" | "PhoenixConnectionProvider" =>
+ "org.apache.kyuubi.engine.jdbc.phoenix.PhoenixConnectionProvider"
+ case "PostgreSQL" | "postgresql" | "PostgreSQLConnectionProvider" =>
+ "org.apache.kyuubi.engine.jdbc.postgresql.PostgreSQLConnectionProvider"
+ case other => other
+ }
.createOptional
val ENGINE_JDBC_SHORT_NAME: OptionalConfigEntry[String] =
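
A sketch of the effect of the new transform (the alias below is one of the documented
short names):

    val conf = KyuubiConf().set(ENGINE_JDBC_CONNECTION_PROVIDER.key, "mysql")
    // conf.get(ENGINE_JDBC_CONNECTION_PROVIDER) is expected to yield
    // Some("org.apache.kyuubi.engine.jdbc.mysql.MySQLConnectionProvider")
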
diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/util/JdbcUtils.scala b/kyuubi-common/src/main/scala/org/apache/kyuubi/util/JdbcUtils.scala
index 996589cb742..4951004b671 100644
--- a/kyuubi-common/src/main/scala/org/apache/kyuubi/util/JdbcUtils.scala
+++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/util/JdbcUtils.scala
@@ -98,6 +98,12 @@ object JdbcUtils extends Logging {
}
}
+ def mapResultSet[R](rs: ResultSet)(rowMapper: ResultSet => R): Seq[R] = {
+ val builder = Seq.newBuilder[R]
+ while (rs.next()) builder += rowMapper(rs)
+ builder.result
+ }
+
def redactPassword(password: Option[String]): String = {
password match {
case Some(s) if StringUtils.isNotBlank(s) => s"${"*" * s.length}(length:${s.length})"
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala
index 028f755f6c8..61cb48b4cc8 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala
@@ -23,6 +23,7 @@ import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.kyuubi.operation.HiveJDBCTestHelper
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+import org.apache.kyuubi.util.JdbcUtils
/**
* hive tests disabled for JAVA 11
@@ -229,14 +230,11 @@ trait HiveEngineTests extends HiveJDBCTestHelper {
assume(SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_1_8))
withJdbcStatement() { statement =>
val resultSet = statement.getConnection.getMetaData.getTableTypes
- val expected = Set("TABLE", "VIEW", "MATERIALIZED_VIEW")
- var tableTypes = Set[String]()
- while (resultSet.next()) {
- assert(expected.contains(resultSet.getString(TABLE_TYPE)))
- tableTypes += resultSet.getString(TABLE_TYPE)
- }
- assert(!resultSet.next())
- assert(expected.size === tableTypes.size)
+ // Hive3 removes support for INDEX_TABLE
+ val hive2Expected = Set("TABLE", "VIEW", "MATERIALIZED_VIEW", "INDEX_TABLE")
+ val hive3Expected = Set("TABLE", "VIEW", "MATERIALIZED_VIEW")
+ val tableTypes = JdbcUtils.mapResultSet(resultSet) { rs => rs.getString(TABLE_TYPE) }.toSet
+ assert(tableTypes === hive2Expected || tableTypes === hive3Expected)
}
}
@@ -387,10 +385,12 @@ trait HiveEngineTests extends HiveJDBCTestHelper {
assert(typeInfo.getInt(DATA_TYPE) === java.sql.Types.TIMESTAMP)
typeInfo.next()
- assert(typeInfo.getString(TYPE_NAME) === "TIMESTAMP WITH LOCAL TIME ZONE")
- assert(typeInfo.getInt(DATA_TYPE) === java.sql.Types.OTHER)
+ // Hive3 supports TIMESTAMP WITH LOCAL TIME ZONE
+ if (typeInfo.getString(TYPE_NAME) == "TIMESTAMP WITH LOCAL TIME ZONE") {
+ assert(typeInfo.getInt(DATA_TYPE) === java.sql.Types.OTHER)
+ typeInfo.next()
+ }
- typeInfo.next()
assert(typeInfo.getString(TYPE_NAME) === "INTERVAL_YEAR_MONTH")
assert(typeInfo.getInt(DATA_TYPE) === java.sql.Types.OTHER)
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala
index 5973fc6e7a6..97d9cd1b552 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala
@@ -167,7 +167,7 @@ class UtilsSuite extends KyuubiFunSuite {
buffer += "--conf"
buffer += "kyuubi.regular.property2=regular_value"
- val commands = buffer.toArray
+ val commands = buffer
// Redact sensitive information
val redactedCmdArgs = Utils.redactCommandLineArgs(conf, commands)
@@ -183,7 +183,7 @@ class UtilsSuite extends KyuubiFunSuite {
expectBuffer += "--conf"
expectBuffer += "kyuubi.regular.property2=regular_value"
- assert(expectBuffer.toArray === redactedCmdArgs)
+ assert(expectBuffer === redactedCmdArgs)
}
test("redact sensitive information") {
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationAuditLogger.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationAuditLogger.scala
index 64569f7d82a..565c8a694e5 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationAuditLogger.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationAuditLogger.scala
@@ -17,6 +17,8 @@
package org.apache.kyuubi.engine
+import scala.collection.JavaConverters._
+
import io.fabric8.kubernetes.api.model.Pod
import org.apache.kyuubi.Logging
@@ -39,6 +41,11 @@ object KubernetesApplicationAuditLogger extends Logging {
sb.append(s"context=${kubernetesInfo.context.orNull}").append("\t")
sb.append(s"namespace=${kubernetesInfo.namespace.orNull}").append("\t")
sb.append(s"pod=${pod.getMetadata.getName}").append("\t")
+ sb.append(s"podState=${pod.getStatus.getPhase}").append("\t")
+ val containerStatuses = pod.getStatus.getContainerStatuses.asScala.map { containerState =>
+ s"${containerState.getName}->${containerState.getState}"
+ }.mkString("[", ",", "]")
+ sb.append(s"containers=$containerStatuses").append("\t")
sb.append(s"appId=${pod.getMetadata.getLabels.get(SPARK_APP_ID_LABEL)}").append("\t")
val (appState, appError) =
toApplicationStateAndError(pod, appStateSource, appStateContainer)
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationOperation.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationOperation.scala
index c8828f5d83c..922dd9a1597 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationOperation.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/KubernetesApplicationOperation.scala
@@ -241,15 +241,15 @@ class KubernetesApplicationOperation extends ApplicationOperation with Logging {
}
enginePodInformers.clear()
- kubernetesClients.asScala.foreach { case (_, client) =>
- Utils.tryLogNonFatalError(client.close())
- }
- kubernetesClients.clear()
-
if (cleanupTerminatedAppInfoTrigger != null) {
cleanupTerminatedAppInfoTrigger.cleanUp()
cleanupTerminatedAppInfoTrigger = null
}
+
+ kubernetesClients.asScala.foreach { case (_, client) =>
+ Utils.tryLogNonFatalError(client.close())
+ }
+ kubernetesClients.clear()
}
private class SparkEnginePodEventHandler(kubernetesInfo: KubernetesInfo)
@@ -341,16 +341,18 @@ object KubernetesApplicationOperation extends Logging {
pod: Pod,
appStateSource: KubernetesApplicationStateSource,
appStateContainer: String): (ApplicationState, Option[String]) = {
+ val podName = pod.getMetadata.getName
val containerStateToBuildAppState = appStateSource match {
case KubernetesApplicationStateSource.CONTAINER =>
pod.getStatus.getContainerStatuses.asScala
- .find(_.getState == appStateContainer).map(_.getState)
+ .find(cs => appStateContainer.equalsIgnoreCase(cs.getName)).map(_.getState)
case KubernetesApplicationStateSource.POD => None
}
val applicationState = containerStateToBuildAppState.map(containerStateToApplicationState)
.getOrElse(podStateToApplicationState(pod.getStatus.getPhase))
- val applicationError = containerStateToBuildAppState.map(containerStateToApplicationError)
- .getOrElse(Option(pod.getStatus.getReason))
+ val applicationError = containerStateToBuildAppState
+ .map(cs => containerStateToApplicationError(cs).map(r => s"$podName/$appStateContainer[$r]"))
+ .getOrElse(Option(pod.getStatus.getReason).map(r => s"$podName[$r]"))
applicationState -> applicationError
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
index 84807a62d87..23196bf1ded 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
@@ -99,7 +99,7 @@ trait ProcBuilder {
protected def proxyUser: String
- protected val commands: Array[String]
+ protected val commands: Iterable[String]
def conf: KyuubiConf
@@ -142,7 +142,7 @@ trait ProcBuilder {
}
final lazy val processBuilder: ProcessBuilder = {
- val pb = new ProcessBuilder(commands: _*)
+ val pb = new ProcessBuilder(commands.toStream.asJava)
val envs = pb.environment()
envs.putAll(env.asJava)
@@ -287,10 +287,10 @@ trait ProcBuilder {
override def toString: String = {
if (commands == null) {
- super.toString()
+ super.toString
} else {
Utils.redactCommandLineArgs(conf, commands).map {
- case arg if arg.startsWith("--") => s"\\\n\t$arg"
+ case arg if arg.startsWith("-") => s"\\\n\t$arg"
case arg => arg
}.mkString(" ")
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/chat/ChatProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/chat/ChatProcessBuilder.scala
index 3e4a20de373..ade6026b18f 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/chat/ChatProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/chat/ChatProcessBuilder.scala
@@ -59,7 +59,7 @@ class ChatProcessBuilder(
*/
override protected def mainClass: String = "org.apache.kyuubi.engine.chat.ChatEngine"
- override protected val commands: Array[String] = {
+ override protected val commands: Iterable[String] = {
val buffer = new ArrayBuffer[String]()
buffer += executable
@@ -98,18 +98,20 @@ class ChatProcessBuilder(
buffer += "--conf"
buffer += s"$k=$v"
}
- buffer.toArray
+ buffer
}
override def toString: String = {
if (commands == null) {
- super.toString()
+ super.toString
} else {
Utils.redactCommandLineArgs(conf, commands).map {
- case arg if arg.startsWith("-") || arg == mainClass => s"\\\n\t$arg"
case arg if arg.contains(ENGINE_CHAT_GPT_API_KEY.key) =>
s"${ENGINE_CHAT_GPT_API_KEY.key}=$REDACTION_REPLACEMENT_TEXT"
case arg => arg
+ }.map {
+ case arg if arg.startsWith("-") || arg == mainClass => s"\\\n\t$arg"
+ case arg => arg
}.mkString(" ")
}
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilder.scala
index f43adfbc216..52364f1894c 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilder.scala
@@ -77,7 +77,7 @@ class FlinkProcessBuilder(
ApplicationManagerInfo(clusterManager())
}
- override protected val commands: Array[String] = {
+ override protected val commands: Iterable[String] = {
KyuubiApplicationManager.tagApplication(engineRefId, shortName, clusterManager(), conf)
// unset engine credentials because Flink doesn't support them at the moment
conf.unset(KyuubiReservedKeys.KYUUBI_ENGINE_CREDENTIALS_KEY)
@@ -142,8 +142,7 @@ class FlinkProcessBuilder(
buffer += s"$k=$v"
}
}
-
- buffer.toArray
+ buffer
case _ =>
val buffer = new ArrayBuffer[String]()
@@ -211,7 +210,7 @@ class FlinkProcessBuilder(
buffer += "--conf"
buffer += s"$k=$v"
}
- buffer.toArray
+ buffer
}
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala
index 61fe55887ea..d7e2709119f 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilder.scala
@@ -52,7 +52,7 @@ class HiveProcessBuilder(
override protected def mainClass: String = "org.apache.kyuubi.engine.hive.HiveSQLEngine"
- override protected val commands: Array[String] = {
+ override protected val commands: Iterable[String] = {
KyuubiApplicationManager.tagApplication(engineRefId, shortName, clusterManager(), conf)
val buffer = new ArrayBuffer[String]()
buffer += executable
@@ -113,11 +113,9 @@ class HiveProcessBuilder(
buffer += "--conf"
buffer += s"$k=$v"
}
- buffer.toArray
+ buffer
}
- override def toString: String = Utils.redactCommandLineArgs(conf, commands).mkString("\n")
-
override def shortName: String = "hive"
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilder.scala
index 14ad53b20a8..5b52dbbb471 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilder.scala
@@ -59,7 +59,7 @@ class JdbcProcessBuilder(
*/
override protected def mainClass: String = "org.apache.kyuubi.engine.jdbc.JdbcSQLEngine"
- override protected val commands: Array[String] = {
+ override protected val commands: Iterable[String] = {
require(
conf.get(ENGINE_JDBC_CONNECTION_URL).nonEmpty,
s"Jdbc server url can not be null! Please set ${ENGINE_JDBC_CONNECTION_URL.key}")
@@ -101,18 +101,21 @@ class JdbcProcessBuilder(
buffer += "--conf"
buffer += s"$k=$v"
}
- buffer.toArray
+ buffer
}
override def toString: String = {
if (commands == null) {
- super.toString()
+ super.toString
} else {
Utils.redactCommandLineArgs(conf, commands).map {
case arg if arg.contains(ENGINE_JDBC_CONNECTION_PASSWORD.key) =>
s"${ENGINE_JDBC_CONNECTION_PASSWORD.key}=$REDACTION_REPLACEMENT_TEXT"
case arg => arg
- }.mkString("\n")
+ }.map {
+ case arg if arg.startsWith("-") => s"\\\n\t$arg"
+ case arg => arg
+ }.mkString(" ")
}
}
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala
index ef159bb93ad..7d69b90d5db 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala
@@ -36,7 +36,7 @@ class SparkBatchProcessBuilder(
extends SparkProcessBuilder(proxyUser, conf, batchId, extraEngineLog) {
import SparkProcessBuilder._
- override protected lazy val commands: Array[String] = {
+ override protected lazy val commands: Iterable[String] = {
val buffer = new ArrayBuffer[String]()
buffer += executable
Option(mainClass).foreach { cla =>
@@ -66,7 +66,7 @@ class SparkBatchProcessBuilder(
batchArgs.foreach { arg => buffer += arg }
- buffer.toArray
+ buffer
}
private def sparkAppNameConf(): Map[String, String] = {
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
index 086ca057de8..57d5f73357d 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
@@ -122,7 +122,7 @@ class SparkProcessBuilder(
file.isDirectory && r.findFirstMatchIn(file.getName).isDefined
}
- override protected lazy val commands: Array[String] = {
+ override protected lazy val commands: Iterable[String] = {
// complete `spark.master` if absent on kubernetes
completeMasterUrl(conf)
@@ -149,7 +149,7 @@ class SparkProcessBuilder(
mainResource.foreach { r => buffer += r }
- buffer.toArray
+ buffer
}
override protected def module: String = "kyuubi-spark-sql-engine"
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilder.scala
index 041219dd0fb..04dc49e037a 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilder.scala
@@ -50,7 +50,7 @@ class TrinoProcessBuilder(
override protected def mainClass: String = "org.apache.kyuubi.engine.trino.TrinoSqlEngine"
- override protected val commands: Array[String] = {
+ override protected val commands: Iterable[String] = {
KyuubiApplicationManager.tagApplication(engineRefId, shortName, clusterManager(), conf)
require(
conf.get(ENGINE_TRINO_CONNECTION_URL).nonEmpty,
@@ -104,14 +104,14 @@ class TrinoProcessBuilder(
buffer += "--conf"
buffer += s"$k=$v"
}
- buffer.toArray
+ buffer
}
override def shortName: String = "trino"
override def toString: String = {
if (commands == null) {
- super.toString()
+ super.toString
} else {
Utils.redactCommandLineArgs(conf, commands).map {
case arg if arg.contains(ENGINE_TRINO_CONNECTION_PASSWORD.key) =>
@@ -121,7 +121,10 @@ class TrinoProcessBuilder(
case arg if arg.contains(ENGINE_TRINO_CONNECTION_TRUSTSTORE_PASSWORD.key) =>
s"${ENGINE_TRINO_CONNECTION_TRUSTSTORE_PASSWORD.key}=$REDACTION_REPLACEMENT_TEXT"
case arg => arg
- }.mkString("\n")
+ }.map {
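+ // same wrapping as JdbcProcessBuilder: every "-"-prefixed argument moves to its own tab-indented
+ // continuation line before the arguments are joined with spaces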
+ case arg if arg.startsWith("-") => s"\\\n\t$arg"
+ case arg => arg
+ }.mkString(" ")
}
}
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionManager.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionManager.scala
index 72e2eb9677b..524d1ef9a1d 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionManager.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSessionManager.scala
@@ -397,17 +397,18 @@ class KyuubiSessionManager private (name: String) extends SessionManager(name) {
private def startEngineAliveChecker(): Unit = {
val interval = conf.get(KyuubiConf.ENGINE_ALIVE_PROBE_INTERVAL)
val checkTask: Runnable = () => {
- allSessions().foreach { session =>
- if (!session.asInstanceOf[KyuubiSessionImpl].checkEngineConnectionAlive()) {
+ allSessions().foreach {
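+ // only KyuubiSessionImpl carries an engine connection to probe; other session types fall through
+ // to the no-op case below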
+ case session: KyuubiSessionImpl =>
try {
- closeSession(session.handle)
- logger.info(s"The session ${session.handle} has been closed " +
- s"due to engine unresponsiveness (checked by the engine alive checker).")
+ if (!session.checkEngineConnectionAlive()) {
+ closeSession(session.handle)
+ logger.info(s"The session ${session.handle} has been closed " +
+ s"due to engine unresponsiveness (checked by the engine alive checker).")
+ }
} catch {
- case e: KyuubiSQLException =>
- warn(s"Error closing session ${session.handle}", e)
+ case e: Throwable => warn(s"Error closing session ${session.handle}", e)
}
- }
+ case _ =>
}
}
scheduleTolerableRunnableWithFixedDelay(
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilderSuite.scala
index 26e355a87bd..84be010ed4b 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilderSuite.scala
@@ -81,8 +81,11 @@ class FlinkProcessBuilderSuite extends KyuubiFunSuite {
val actualCommands = builder.toString
val classpathStr = constructClasspathStr(builder)
val expectedCommands =
- s"$javaPath -Xmx512m -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005 " +
- s"-cp $classpathStr $mainClassStr \\\\\\n\\t--conf kyuubi.session.user=vinoyang $confStr"
+ s"""$javaPath \\\\
+ |\\t-Xmx512m \\\\
+ |\\t-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005 \\\\
+ |\\t-cp $classpathStr $mainClassStr \\\\
+ |\\t--conf kyuubi.session.user=vinoyang $confStr""".stripMargin
val regex = new Regex(expectedCommands)
val matcher = regex.pattern.matcher(actualCommands)
assert(matcher.matches())
@@ -90,19 +93,20 @@ class FlinkProcessBuilderSuite extends KyuubiFunSuite {
private def matchActualAndExpectedApplicationMode(builder: FlinkProcessBuilder): Unit = {
val actualCommands = builder.toString
+ // scalastyle:off line.size.limit
val expectedCommands =
- escapePaths(s"${builder.flinkExecutable} run-application ") +
- s"-t yarn-application " +
- s"-Dyarn.ship-files=.*\\/flink-sql-client.*jar;.*\\/flink-sql-gateway.*jar;$tempUdfJar" +
- s";.*\\/hive-site\\.xml " +
- s"-Dyarn\\.application\\.name=kyuubi_.* " +
- s"-Dyarn\\.tags=KYUUBI " +
- s"-Dcontainerized\\.master\\.env\\.FLINK_CONF_DIR=\\. " +
- s"-Dcontainerized\\.master\\.env\\.HIVE_CONF_DIR=\\. " +
- s"-Dexecution.target=yarn-application " +
- s"-c org\\.apache\\.kyuubi\\.engine\\.flink\\.FlinkSQLEngine " +
- s".*kyuubi-flink-sql-engine_.*jar" +
- s"(?: \\\\\\n\\t--conf \\S+=\\S+)+"
+ escapePaths(
+ s"""${builder.flinkExecutable} run-application \\\\
+ |\\t-t yarn-application \\\\
+ |\\t-Dyarn.ship-files=.*flink-sql-client.*jar;.*flink-sql-gateway.*jar;$tempUdfJar;.*hive-site.xml \\\\
+ |\\t-Dyarn.application.name=kyuubi_.* \\\\
+ |\\t-Dyarn.tags=KYUUBI \\\\
+ |\\t-Dcontainerized.master.env.FLINK_CONF_DIR=. \\\\
+ |\\t-Dcontainerized.master.env.HIVE_CONF_DIR=. \\\\
+ |\\t-Dexecution.target=yarn-application \\\\
+ |\\t-c org.apache.kyuubi.engine.flink.FlinkSQLEngine .*kyuubi-flink-sql-engine_.*jar""".stripMargin +
+ "(?: \\\\\\n\\t--conf \\S+=\\S+)+")
+ // scalastyle:on line.size.limit
val regex = new Regex(expectedCommands)
val matcher = regex.pattern.matcher(actualCommands)
assert(matcher.matches())
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilderSuite.scala
index bb9884dfa4b..a2f39633ca4 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/hive/HiveProcessBuilderSuite.scala
@@ -30,18 +30,18 @@ class HiveProcessBuilderSuite extends KyuubiFunSuite {
override def env: Map[String, String] = super.env + (HIVE_HADOOP_CLASSPATH_KEY -> "/hadoop")
}
val commands = builder.toString.split('\n')
- assert(commands.head.endsWith("bin/java"), "wrong exec")
- assert(builder.toString.contains("--conf\nkyuubi.session.user=kyuubi"))
+ assert(commands.head.contains("bin/java"), "wrong exec")
+ assert(builder.toString.contains("--conf kyuubi.session.user=kyuubi"))
assert(commands.exists(ss => ss.contains("kyuubi-hive-sql-engine")), "wrong classpath")
- assert(builder.toString.contains("--conf\nkyuubi.on=off"))
+ assert(builder.toString.contains("--conf kyuubi.on=off"))
}
test("default engine memory") {
val conf = KyuubiConf()
.set(ENGINE_HIVE_EXTRA_CLASSPATH, "/hadoop")
val builder = new HiveProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split('\n')
- assert(commands.contains("-Xmx1g"))
+ val command = builder.toString
+ assert(command.contains("-Xmx1g"))
}
test("set engine memory") {
@@ -49,8 +49,8 @@ class HiveProcessBuilderSuite extends KyuubiFunSuite {
.set(ENGINE_HIVE_MEMORY, "5g")
.set(ENGINE_HIVE_EXTRA_CLASSPATH, "/hadoop")
val builder = new HiveProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split('\n')
- assert(commands.contains("-Xmx5g"))
+ val command = builder.toString
+ assert(command.contains("-Xmx5g"))
}
test("set engine java opts") {
@@ -60,8 +60,8 @@ class HiveProcessBuilderSuite extends KyuubiFunSuite {
ENGINE_HIVE_JAVA_OPTIONS,
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005")
val builder = new HiveProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split('\n')
- assert(commands.contains("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"))
+ val command = builder.toString
+ assert(command.contains("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"))
}
test("set engine extra classpath") {
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala
index f85e363d39e..2be39d0f319 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala
@@ -27,13 +27,13 @@ class JdbcProcessBuilderSuite extends KyuubiFunSuite {
.set(ENGINE_JDBC_CONNECTION_URL.key, "")
.set(ENGINE_JDBC_CONNECTION_PASSWORD.key, "123456")
val builder = new JdbcProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split("\n")
- assert(commands.head.endsWith("bin/java"), "wrong exec")
- assert(builder.toString.contains("--conf\nkyuubi.session.user=kyuubi"))
- assert(commands.exists(ss => ss.contains("kyuubi-jdbc-engine")), "wrong classpath")
- assert(builder.toString.contains("--conf\nkyuubi.on=off"))
- assert(builder.toString.contains(
- "--conf\nkyuubi.engine.jdbc.connection.password=*********(redacted)"))
+ val command = builder.toString
+ assert(command.contains("bin/java"), "wrong exec")
+ assert(command.contains("--conf kyuubi.session.user=kyuubi"))
+ assert(command.contains("kyuubi-jdbc-engine"), "wrong classpath")
+ assert(command.contains("--conf kyuubi.on=off"))
+ assert(command.contains(
+ "--conf kyuubi.engine.jdbc.connection.password=*********(redacted)"))
}
test("capture error from jdbc process builder") {
@@ -47,8 +47,8 @@ class JdbcProcessBuilderSuite extends KyuubiFunSuite {
val conf = KyuubiConf()
.set(ENGINE_JDBC_CONNECTION_URL.key, "")
val builder = new JdbcProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split("\n")
- assert(commands.contains("-Xmx1g"))
+ val command = builder.toString
+ assert(command.contains("-Xmx1g"))
}
test("set engine memory") {
@@ -56,8 +56,8 @@ class JdbcProcessBuilderSuite extends KyuubiFunSuite {
.set(ENGINE_JDBC_MEMORY, "5g")
.set(ENGINE_JDBC_CONNECTION_URL.key, "")
val builder = new JdbcProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split("\n")
- assert(commands.contains("-Xmx5g"))
+ val command = builder.toString
+ assert(command.contains("-Xmx5g"))
}
test("set engine java options") {
@@ -67,8 +67,8 @@ class JdbcProcessBuilderSuite extends KyuubiFunSuite {
ENGINE_JDBC_JAVA_OPTIONS,
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005")
val builder = new JdbcProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split("\n")
- assert(commands.contains("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"))
+ val command = builder.toString
+ assert(command.contains("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"))
}
test("set extra classpath") {
@@ -76,7 +76,7 @@ class JdbcProcessBuilderSuite extends KyuubiFunSuite {
.set(ENGINE_JDBC_CONNECTION_URL.key, "")
.set(ENGINE_JDBC_EXTRA_CLASSPATH, "/dummy_classpath/*")
val builder = new JdbcProcessBuilder("kyuubi", conf)
- val commands = builder.toString
- assert(commands.contains("/dummy_classpath/*"))
+ val command = builder.toString
+ assert(command.contains("/dummy_classpath/*"))
}
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
index 7bbe4ad0670..27fd36815f8 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
@@ -427,5 +427,5 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper with MockitoSugar {
class FakeSparkProcessBuilder(config: KyuubiConf)
extends SparkProcessBuilder("fake", config) {
- override protected lazy val commands: Array[String] = Array("ls")
+ override protected lazy val commands: Iterable[String] = Seq("ls")
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala
index 2c37c41bc4b..a4dfad186a1 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala
@@ -30,11 +30,11 @@ class TrinoProcessBuilderSuite extends KyuubiFunSuite {
.set(ENGINE_TRINO_CONNECTION_CATALOG, "dummy_catalog")
val builder = new TrinoProcessBuilder("kyuubi", conf)
val commands = builder.toString.split("\n")
- assert(commands.head.endsWith("java"))
- assert(builder.toString.contains(s"--conf\n${KYUUBI_SESSION_USER_KEY}=kyuubi"))
- assert(builder.toString.contains(s"--conf\n${ENGINE_TRINO_CONNECTION_URL.key}=dummy_url"))
+ assert(commands.head.contains("java"))
+ assert(builder.toString.contains(s"--conf ${KYUUBI_SESSION_USER_KEY}=kyuubi"))
+ assert(builder.toString.contains(s"--conf ${ENGINE_TRINO_CONNECTION_URL.key}=dummy_url"))
assert(builder.toString.contains(
- s"--conf\n${ENGINE_TRINO_CONNECTION_CATALOG.key}=dummy_catalog"))
+ s"--conf ${ENGINE_TRINO_CONNECTION_CATALOG.key}=dummy_catalog"))
}
test("capture error from trino process builder") {
@@ -49,8 +49,8 @@ class TrinoProcessBuilderSuite extends KyuubiFunSuite {
.set(ENGINE_TRINO_CONNECTION_URL, "dummy_url")
.set(ENGINE_TRINO_CONNECTION_CATALOG, "dummy_catalog")
val builder = new TrinoProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split("\n")
- assert(commands.contains("-Xmx1g"))
+ val command = builder.toString
+ assert(command.contains("-Xmx1g"))
}
test("set engine memory") {
@@ -59,8 +59,8 @@ class TrinoProcessBuilderSuite extends KyuubiFunSuite {
.set(ENGINE_TRINO_CONNECTION_CATALOG, "dummy_catalog")
.set(ENGINE_TRINO_MEMORY, "5g")
val builder = new TrinoProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split("\n")
- assert(commands.contains("-Xmx5g"))
+ val command = builder.toString
+ assert(command.contains("-Xmx5g"))
}
test("set engine java options") {
@@ -71,8 +71,8 @@ class TrinoProcessBuilderSuite extends KyuubiFunSuite {
ENGINE_TRINO_JAVA_OPTIONS,
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005")
val builder = new TrinoProcessBuilder("kyuubi", conf)
- val commands = builder.toString.split("\n")
- assert(commands.contains("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"))
+ val command = builder.toString
+ assert(command.contains("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"))
}
test("set extra classpath") {
diff --git a/pom.xml b/pom.xml
index ace19c289df..b7ba11018e1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -562,6 +562,12 @@
                <version>${testcontainers-scala.version}</version>
            </dependency>

+            <dependency>
+                <groupId>com.dimafeng</groupId>
+                <artifactId>testcontainers-scala-mysql_${scala.binary.version}</artifactId>
+                <version>${testcontainers-scala.version}</version>
+            </dependency>
+
            <dependency>
                <groupId>com.dimafeng</groupId>
                <artifactId>testcontainers-scala-trino_${scala.binary.version}</artifactId>