diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index b555bbcf8be..8e55009e275 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -13,12 +13,7 @@
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ {
-    "fieldName" : "child",
-    "fieldExtractor" : "ResolvedTableURIExtractor",
-    "isInput" : false,
-    "comment" : ""
-  } ]
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AddPartitions",
   "tableDescs" : [ {
@@ -50,12 +45,7 @@
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ {
-    "fieldName" : "child",
-    "fieldExtractor" : "ResolvedTableURIExtractor",
-    "isInput" : false,
-    "comment" : ""
-  } ]
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AlterTable",
   "tableDescs" : [ {
@@ -71,12 +61,7 @@
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
   "queryDescs" : [ ],
-  "uriDescs" : [ {
-    "fieldName" : "ident",
-    "fieldExtractor" : "IdentifierURIExtractor",
-    "isInput" : false,
-    "comment" : ""
-  } ]
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AppendData",
   "tableDescs" : [ {
@@ -334,12 +319,7 @@
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ {
-    "fieldName" : "child",
-    "fieldExtractor" : "ResolvedTableURIExtractor",
-    "isInput" : false,
-    "comment" : ""
-  } ]
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DropPartitions",
   "tableDescs" : [ {
@@ -498,12 +478,7 @@
   } ],
   "opType" : "ALTERTABLE_RENAMECOL",
   "queryDescs" : [ ],
-  "uriDescs" : [ {
-    "fieldName" : "child",
-    "fieldExtractor" : "ResolvedTableURIExtractor",
-    "isInput" : false,
-    "comment" : ""
-  } ]
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.RenamePartitions",
   "tableDescs" : [ {
@@ -551,12 +526,7 @@
   } ],
   "opType" : "ALTERTABLE_REPLACECOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ {
-    "fieldName" : "child",
-    "fieldExtractor" : "ResolvedTableURIExtractor",
-    "isInput" : false,
-    "comment" : ""
-  } ]
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceData",
   "tableDescs" : [ {
@@ -706,12 +676,7 @@
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
   "queryDescs" : [ ],
-  "uriDescs" : [ {
-    "fieldName" : "table",
-    "fieldExtractor" : "ResolvedTableURIExtractor",
-    "isInput" : false,
-    "comment" : ""
-  } ]
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable",
   "tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index aced937b9a6..ee0345a8c26 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -39,8 +39,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
   val AlterTable = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.AlterTable"
     val tableDesc = TableDesc("ident", classOf[IdentifierTableExtractor])
-    val uriDescs = Seq(UriDesc("ident", classOf[IdentifierURIExtractor]))
-    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES, uriDescs = uriDescs)
+    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
   }
 
   val AlterTableAddColumns = {
@@ -52,8 +51,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
 
   val AddColumns = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.AddColumns"
-    val uriDescs = Seq(UriDesc("child", classOf[ResolvedTableURIExtractor]))
-    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS, uriDescs = uriDescs)
+    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS)
   }
 
   val AlterColumn = {
@@ -68,12 +66,12 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
 
   val ReplaceColumns = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.ReplaceColumns"
-    AddColumns.copy(classname = cmd, opType = ALTERTABLE_REPLACECOLS)
+    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_REPLACECOLS)
   }
 
   val RenameColumn = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.RenameColumn"
-    AddColumns.copy(classname = cmd, opType = ALTERTABLE_RENAMECOL)
+    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_RENAMECOL)
   }
 
   val AlterTableAddPartition = {
@@ -640,8 +638,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
   val SetTableProperties = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.SetTableProperties"
     val tableDesc = TableDesc("table", classOf[ResolvedTableTableExtractor])
-    val uriDescs = Seq(UriDesc("table", classOf[ResolvedTableURIExtractor]))
-    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES, uriDescs = uriDescs)
+    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
   }
 
   val AddArchivesCommand = {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
index 1ce8ad6765f..c1dda69896a 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
@@ -25,7 +25,7 @@ import org.apache.kyuubi.plugin.spark.authz.AccessControlException
 import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
 import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
-import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.{isSparkV32OrGreater, isSparkV35OrGreater}
+import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.isSparkV32OrGreater
 import org.apache.kyuubi.tags.DeltaTest
 import org.apache.kyuubi.util.AssertionUtils._
 
@@ -41,14 +41,6 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
   val table1 = "table1_delta"
   val table2 = "table2_delta"
 
-  def propString(props: Map[String, String]): String =
-    if (props.isEmpty) ""
-    else {
-      props
-        .map { case (key, value) => s"'$key' = '$value'" }
-        .mkString("TBLPROPERTIES (", ",", ")")
-    }
-
   def createTableSql(namespace: String, table: String): String =
     s"""
        |CREATE TABLE IF NOT EXISTS $namespace.$table (
@@ -61,7 +53,7 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
        |PARTITIONED BY (gender)
        |""".stripMargin
 
-  def createPathBasedTableSql(path: Path, props: Map[String, String] = Map.empty): String =
+  def createPathBasedTableSql(path: Path): String =
     s"""
        |CREATE TABLE IF NOT EXISTS delta.`$path` (
        |  id INT,
@@ -71,7 +63,6 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
        |)
        |USING DELTA
        |PARTITIONED BY (gender)
-       |${propString(props)}
        |""".stripMargin
 
   override def withFixture(test: NoArgTest): Outcome = {
@@ -477,95 +468,6 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       doAs(admin, sql(vacuumTableSql2))
     })
   }
-
-  test("alter path-based table set properties") {
-    withTempDir(path => {
-      doAs(admin, sql(createPathBasedTableSql(path)))
-      val setPropertiesSql = s"ALTER TABLE delta.`$path`" +
-        s" SET TBLPROPERTIES ('delta.appendOnly' = 'true')"
-      interceptEndsWith[AccessControlException](
-        doAs(someone, sql(setPropertiesSql)))(
-        s"does not have [write] privilege on [[$path, $path/]]")
-      doAs(admin, sql(setPropertiesSql))
-    })
-  }
-
-  test("alter path-based table add columns") {
-    withTempDir(path => {
-      doAs(admin, sql(createPathBasedTableSql(path)))
-      val addColumnsSql = s"ALTER TABLE delta.`$path` ADD COLUMNS (age int)"
-      interceptEndsWith[AccessControlException](
-        doAs(someone, sql(addColumnsSql)))(
-        s"does not have [write] privilege on [[$path, $path/]]")
-      doAs(admin, sql(addColumnsSql))
-    })
-  }
-
-  test("alter path-based table change column") {
-    withTempDir(path => {
-      doAs(admin, sql(createPathBasedTableSql(path)))
-      val changeColumnSql = s"ALTER TABLE delta.`$path`" +
-        s" CHANGE COLUMN gender gender STRING AFTER birthDate"
-      interceptEndsWith[AccessControlException](
-        doAs(someone, sql(changeColumnSql)))(
-        s"does not have [write] privilege on [[$path, $path/]]")
-      doAs(admin, sql(changeColumnSql))
-    })
-  }
-
-  test("alter path-based table drop column") {
-    assume(
-      isSparkV32OrGreater,
-      "alter table drop column is available in Delta Lake 1.2.0 and above")
-
-    withTempDir(path => {
-      doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
-      val dropColumnSql = s"ALTER TABLE delta.`$path` DROP COLUMN birthDate"
-      interceptEndsWith[AccessControlException](
-        doAs(someone, sql(dropColumnSql)))(
-        s"does not have [write] privilege on [[$path, $path/]]")
-      doAs(admin, sql(dropColumnSql))
-    })
-  }
-
-  test("alter path-based table rename column") {
-    assume(
-      isSparkV32OrGreater,
-      "alter table rename column is available in Delta Lake 1.2.0 and above")
-
-    withTempDir(path => {
-      doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
-      val renameColumnSql = s"ALTER TABLE delta.`$path`" +
-        s" RENAME COLUMN birthDate TO dateOfBirth"
-      interceptEndsWith[AccessControlException](
-        doAs(someone, sql(renameColumnSql)))(
-        s"does not have [write] privilege on [[$path, $path/]]")
-      doAs(admin, sql(renameColumnSql))
-    })
-  }
-
-  test("alter path-based table replace columns") {
-    withTempDir(path => {
-      assume(
-        isSparkV32OrGreater,
-        "alter table replace columns is not available in Delta Lake 1.0.1")
-
-      doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
-      val replaceColumnsSql = s"ALTER TABLE delta.`$path`" +
-        s" REPLACE COLUMNS (id INT, name STRING, gender STRING)"
-      interceptEndsWith[AccessControlException](
-        doAs(someone, sql(replaceColumnsSql)))(
-        s"does not have [write] privilege on [[$path, $path/]]")
-
-      // There was a bug before Delta Lake 3.0, it will throw AnalysisException message
-      // "Cannot drop column from a struct type with a single field:
-      // StructType(StructField(birthDate,TimestampType,true))".
-      // For details, see https://github.com/delta-io/delta/pull/1822
-      if (isSparkV35OrGreater) {
-        doAs(admin, sql(replaceColumnsSql))
-      }
-    })
-  }
 }
 
 object DeltaCatalogRangerSparkExtensionSuite {