From 37006869be990a1f33c38eedc5aae2ded60d58ec Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Thu, 19 Oct 2023 20:26:26 +0800 Subject: [PATCH 1/6] [KYUUBI #5447][AUTHZ] Support hudi DeleteHoodieTableCommand/UpdateHoodieTableCommand/MergeIntoHoodieTableCommand --- ...bi.plugin.spark.authz.serde.QueryExtractor | 1 + ...bi.plugin.spark.authz.serde.TableExtractor | 2 + .../main/resources/table_command_spec.json | 63 +++++++++++++++++++ .../spark/authz/serde/queryExtractors.scala | 8 +++ .../spark/authz/serde/tableExtractors.scala | 37 +++++++++++ .../plugin/spark/authz/gen/HudiCommands.scala | 40 +++++++++++- ...HudiCatalogRangerSparkExtensionSuite.scala | 62 ++++++++++++++++++ 7 files changed, 212 insertions(+), 1 deletion(-) diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor index c659114f944..3916dacc804 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor @@ -17,3 +17,4 @@ org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanOptionQueryExtractor org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanQueryExtractor +org.apache.kyuubi.plugin.spark.authz.serde.HudiMergeIntoSourceTableExtractor \ No newline at end of file diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor index 78f836c65cd..d4f02e1a539 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor @@ -27,3 +27,5 @@ org.apache.kyuubi.plugin.spark.authz.serde.ResolvedTableTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.StringTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.TableIdentifierTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.TableTableExtractor +org.apache.kyuubi.plugin.spark.authz.serde.HudiDataSourceV2RelationTableExtractor +org.apache.kyuubi.plugin.spark.authz.serde.HudiMergeIntoTargetTableExtractor diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json index 2d7199ff92f..95e00f7213b 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json @@ -1604,6 +1604,27 @@ } ], "opType" : "CREATETABLE", "queryDescs" : [ ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.DeleteHoodieTableCommand", + "tableDescs" : [ { + "fieldName" : "dft", + "fieldExtractor" : "HudiDataSourceV2RelationTableExtractor", + "columnDesc" : null, + "actionTypeDesc" : { + "fieldName" : null, + "fieldExtractor" : null, + "actionType" : "UPDATE" + }, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } ], + "opType" : "QUERY", + 
"queryDescs" : [ { + "fieldName" : "query", + "fieldExtractor" : "LogicalPlanQueryExtractor" + } ] }, { "classname" : "org.apache.spark.sql.hudi.command.DropHoodieTableCommand", "tableDescs" : [ { @@ -1643,6 +1664,27 @@ "fieldName" : "query", "fieldExtractor" : "LogicalPlanQueryExtractor" } ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.MergeIntoHoodieTableCommand", + "tableDescs" : [ { + "fieldName" : "mergeInto", + "fieldExtractor" : "HudiMergeIntoTargetTableExtractor", + "columnDesc" : null, + "actionTypeDesc" : { + "fieldName" : null, + "fieldExtractor" : null, + "actionType" : "UPDATE" + }, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } ], + "opType" : "QUERY", + "queryDescs" : [ { + "fieldName" : "mergeInto", + "fieldExtractor" : "HudiMergeIntoSourceTableExtractor" + } ] }, { "classname" : "org.apache.spark.sql.hudi.command.RepairHoodieTableCommand", "tableDescs" : [ { @@ -1688,4 +1730,25 @@ } ], "opType" : "TRUNCATETABLE", "queryDescs" : [ ] +}, { + "classname" : "org.apache.spark.sql.hudi.command.UpdateHoodieTableCommand", + "tableDescs" : [ { + "fieldName" : "ut", + "fieldExtractor" : "HudiDataSourceV2RelationTableExtractor", + "columnDesc" : null, + "actionTypeDesc" : { + "fieldName" : null, + "fieldExtractor" : null, + "actionType" : "UPDATE" + }, + "tableTypeDesc" : null, + "catalogDesc" : null, + "isInput" : false, + "setCurrentDatabaseIfMissing" : false + } ], + "opType" : "QUERY", + "queryDescs" : [ { + "fieldName" : "query", + "fieldExtractor" : "LogicalPlanQueryExtractor" + } ] } ] \ No newline at end of file diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala index f6fc19ac280..4ac87e100e4 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala @@ -19,6 +19,8 @@ package org.apache.kyuubi.plugin.spark.authz.serde import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan +import org.apache.kyuubi.util.reflect.ReflectUtils.invokeAs + trait QueryExtractor extends (AnyRef => Option[LogicalPlan]) with Extractor object QueryExtractor { @@ -44,3 +46,9 @@ class LogicalPlanOptionQueryExtractor extends QueryExtractor { v1.asInstanceOf[Option[LogicalPlan]] } } + +class HudiMergeIntoSourceTableExtractor extends QueryExtractor { + override def apply(v1: AnyRef): Option[LogicalPlan] = { + new LogicalPlanQueryExtractor().apply(invokeAs[LogicalPlan](v1, "sourceTable")) + } +} diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala index 57eab9634f7..3cf643018c1 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala @@ -20,6 +20,7 @@ package org.apache.kyuubi.plugin.spark.authz.serde import java.util.{Map => JMap} import scala.collection.JavaConverters._ +import scala.collection.mutable.ArrayBuffer import org.apache.spark.sql.SparkSession 
import org.apache.spark.sql.catalyst.TableIdentifier @@ -27,6 +28,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTable import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan +import org.apache.kyuubi.plugin.spark.authz.{PrivilegeObject, PrivilegesBuilder} import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._ import org.apache.kyuubi.util.reflect.ReflectUtils._ @@ -240,3 +242,38 @@ class TableTableExtractor extends TableExtractor { lookupExtractor[StringTableExtractor].apply(spark, tableName) } } + +class HudiDataSourceV2RelationTableExtractor extends TableExtractor { + override def apply(spark: SparkSession, v1: AnyRef): Option[Table] = { + val outputObjs = new ArrayBuffer[PrivilegeObject] + PrivilegesBuilder.buildQuery(invokeAs[LogicalPlan](v1, "table"), outputObjs, spark = spark) + if (outputObjs.isEmpty) { + None + } else { + Option(Table( + outputObjs.head.catalog, + Option(outputObjs.head.dbname), + outputObjs.head.objectName, + outputObjs.head.owner)) + } + } +} + +class HudiMergeIntoTargetTableExtractor extends TableExtractor { + override def apply(spark: SparkSession, v1: AnyRef): Option[Table] = { + val outputObjs = new ArrayBuffer[PrivilegeObject] + PrivilegesBuilder.buildQuery( + invokeAs[LogicalPlan](v1, "targetTable"), + outputObjs, + spark = spark) + if (outputObjs.isEmpty) { + None + } else { + Option(Table( + outputObjs.head.catalog, + Option(outputObjs.head.dbname), + outputObjs.head.objectName, + outputObjs.head.owner)) + } + } +} diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala index 0b19204f530..5bd7a683780 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala @@ -18,6 +18,7 @@ package org.apache.kyuubi.plugin.spark.authz.gen import org.apache.kyuubi.plugin.spark.authz.OperationType._ +import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._ import org.apache.kyuubi.plugin.spark.authz.serde._ import org.apache.kyuubi.plugin.spark.authz.serde.TableType._ @@ -154,6 +155,40 @@ object HudiCommands { TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query"))) } + val DeleteHoodieTableCommand = { + val cmd = "org.apache.spark.sql.hudi.command.DeleteHoodieTableCommand" + val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE)) + val tableDesc = + TableDesc( + "dft", + classOf[HudiDataSourceV2RelationTableExtractor], + actionTypeDesc = Some(actionTypeDesc)) + TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query"))) + } + + val UpdateHoodieTableCommand = { + val cmd = "org.apache.spark.sql.hudi.command.UpdateHoodieTableCommand" + val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE)) + val tableDesc = + TableDesc( + "ut", + classOf[HudiDataSourceV2RelationTableExtractor], + actionTypeDesc = Some(actionTypeDesc)) + TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query"))) + } + + val MergeIntoHoodieTableCommand = { + val cmd = "org.apache.spark.sql.hudi.command.MergeIntoHoodieTableCommand" + val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE)) + val tableDesc = + TableDesc( + "mergeInto", + classOf[HudiMergeIntoTargetTableExtractor], + 
actionTypeDesc = Some(actionTypeDesc)) + val queryDescs = QueryDesc("mergeInto", classOf[HudiMergeIntoSourceTableExtractor]) + TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDescs)) + } + val data: Array[TableCommandSpec] = Array( AlterHoodieTableAddColumnsCommand, AlterHoodieTableChangeColumnCommand, @@ -165,9 +200,12 @@ object HudiCommands { CreateHoodieTableLikeCommand, CompactionHoodieTableCommand, CompactionShowHoodieTableCommand, + DeleteHoodieTableCommand, DropHoodieTableCommand, InsertIntoHoodieTableCommand, + MergeIntoHoodieTableCommand, RepairHoodieTableCommand, TruncateHoodieTableCommand, - Spark31AlterTableCommand) + Spark31AlterTableCommand, + UpdateHoodieTableCommand) } diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala index e707f0c9ed2..93d075a6a15 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala @@ -370,4 +370,66 @@ class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { } } } + + test("DeleteHoodieTableCommand/UpdateHoodieTableCommand/MergeIntoHoodieTableCommand") { + withSingleCallEnabled { + withCleanTmpResources(Seq( + (s"$namespace1.$table1", "table"), + (s"$namespace1.$table2", "table"), + (namespace1, "database"))) { + doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1")) + doAs( + admin, + sql( + s""" + |CREATE TABLE IF NOT EXISTS $namespace1.$table1(id int, name string, city string) + |USING HUDI + |OPTIONS ( + | type = 'cow', + | primaryKey = 'id', + | 'hoodie.datasource.hive_sync.enable' = 'false' + |) + |PARTITIONED BY(city) + |""".stripMargin)) + + doAs( + admin, + sql( + s""" + |CREATE TABLE IF NOT EXISTS $namespace1.$table2(id int, name string, city string) + |USING HUDI + |OPTIONS ( + | type = 'cow', + | primaryKey = 'id', + | 'hoodie.datasource.hive_sync.enable' = 'false' + |) + |PARTITIONED BY(city) + |""".stripMargin)) + + val deleteFrom = s"DELETE FROM $namespace1.$table1 WHERE id = 10" + interceptContains[AccessControlException] { + doAs(someone, sql(deleteFrom)) + }(s"does not have [update] privilege on [$namespace1/$table1]") + + val updateSql = s"UPDATE $namespace1.$table1 SET name = 'test' WHERE id > 10" + interceptContains[AccessControlException] { + doAs(someone, sql(updateSql)) + }(s"does not have [update] privilege on [$namespace1/$table1]") + + val mergeIntoSQL = + s""" + |MERGE INTO $namespace1.$table1 target + |USING $namespace1.$table2 source + |ON target.id = source.id + |WHEN MATCHED + |AND target.name == 'test' + | THEN UPDATE SET id = source.id, name = source.name, city = source.city + |""".stripMargin + interceptContains[AccessControlException] { + doAs(someone, sql(mergeIntoSQL)) + }(s"does not have [select] privilege on " + + s"[$namespace1/$table2/id,$namespace1/$table2/name,$namespace1/$table2/city]") + } + } + } } From 1a72f132372234c98e6f286fb4b6ab8444e9b9dd Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Thu, 19 Oct 2023 20:31:32 +0800 Subject: [PATCH 2/6] update --- .../plugin/spark/authz/gen/HudiCommands.scala | 22 +++---- ...HudiCatalogRangerSparkExtensionSuite.scala | 62 +++++++++++++++++++ 2 files 
changed, 73 insertions(+), 11 deletions(-) diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala index e4949d54181..522059f27b6 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala @@ -155,6 +155,17 @@ object HudiCommands { TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query"))) } + val ShowHoodieTablePartitionsCommand = { + val cmd = "org.apache.spark.sql.hudi.command.ShowHoodieTablePartitionsCommand" + val columnDesc = ColumnDesc("specOpt", classOf[PartitionOptionColumnExtractor]) + val tableDesc = TableDesc( + "tableIdentifier", + classOf[TableIdentifierTableExtractor], + isInput = true, + columnDesc = Some(columnDesc)) + TableCommandSpec(cmd, Seq(tableDesc), SHOWPARTITIONS) + } + val DeleteHoodieTableCommand = { val cmd = "org.apache.spark.sql.hudi.command.DeleteHoodieTableCommand" val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE)) @@ -189,17 +200,6 @@ object HudiCommands { TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDescs)) } - val ShowHoodieTablePartitionsCommand = { - val cmd = "org.apache.spark.sql.hudi.command.ShowHoodieTablePartitionsCommand" - val columnDesc = ColumnDesc("specOpt", classOf[PartitionOptionColumnExtractor]) - val tableDesc = TableDesc( - "tableIdentifier", - classOf[TableIdentifierTableExtractor], - isInput = true, - columnDesc = Some(columnDesc)) - TableCommandSpec(cmd, Seq(tableDesc), SHOWPARTITIONS) - } - val data: Array[TableCommandSpec] = Array( AlterHoodieTableAddColumnsCommand, AlterHoodieTableChangeColumnCommand, diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala index 193446bb24f..b14076b2cfb 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala @@ -407,4 +407,66 @@ class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { } } } + + test("DeleteHoodieTableCommand/UpdateHoodieTableCommand/MergeIntoHoodieTableCommand") { + withSingleCallEnabled { + withCleanTmpResources(Seq( + (s"$namespace1.$table1", "table"), + (s"$namespace1.$table2", "table"), + (namespace1, "database"))) { + doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1")) + doAs( + admin, + sql( + s""" + |CREATE TABLE IF NOT EXISTS $namespace1.$table1(id int, name string, city string) + |USING HUDI + |OPTIONS ( + | type = 'cow', + | primaryKey = 'id', + | 'hoodie.datasource.hive_sync.enable' = 'false' + |) + |PARTITIONED BY(city) + |""".stripMargin)) + + doAs( + admin, + sql( + s""" + |CREATE TABLE IF NOT EXISTS $namespace1.$table2(id int, name string, city string) + |USING HUDI + |OPTIONS ( + | type = 'cow', + | primaryKey = 'id', + | 'hoodie.datasource.hive_sync.enable' = 'false' + |) + |PARTITIONED BY(city) + |""".stripMargin)) + + val deleteFrom = s"DELETE FROM $namespace1.$table1 
WHERE id = 10" + interceptContains[AccessControlException] { + doAs(someone, sql(deleteFrom)) + }(s"does not have [update] privilege on [$namespace1/$table1]") + + val updateSql = s"UPDATE $namespace1.$table1 SET name = 'test' WHERE id > 10" + interceptContains[AccessControlException] { + doAs(someone, sql(updateSql)) + }(s"does not have [update] privilege on [$namespace1/$table1]") + + val mergeIntoSQL = + s""" + |MERGE INTO $namespace1.$table1 target + |USING $namespace1.$table2 source + |ON target.id = source.id + |WHEN MATCHED + |AND target.name == 'test' + | THEN UPDATE SET id = source.id, name = source.name, city = source.city + |""".stripMargin + interceptContains[AccessControlException] { + doAs(someone, sql(mergeIntoSQL)) + }(s"does not have [select] privilege on " + + s"[$namespace1/$table2/id,$namespace1/$table2/name,$namespace1/$table2/city]") + } + } + } } From df6e244e22c6ec4f1cf4b91af7d44994af23511b Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Fri, 20 Oct 2023 11:56:14 +0800 Subject: [PATCH 3/6] update --- .../org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor | 4 ++-- .../org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor index 3916dacc804..5fbca90908c 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor @@ -15,6 +15,6 @@ # limitations under the License. 
# +org.apache.kyuubi.plugin.spark.authz.serde.HudiMergeIntoSourceTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanOptionQueryExtractor -org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanQueryExtractor -org.apache.kyuubi.plugin.spark.authz.serde.HudiMergeIntoSourceTableExtractor \ No newline at end of file +org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanQueryExtractor \ No newline at end of file diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor index d4f02e1a539..33c8b8759fc 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor @@ -19,6 +19,8 @@ org.apache.kyuubi.plugin.spark.authz.serde.CatalogTableOptionTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.CatalogTableTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.DataSourceV2RelationTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.ExpressionSeqTableExtractor +org.apache.kyuubi.plugin.spark.authz.serde.HudiDataSourceV2RelationTableExtractor +org.apache.kyuubi.plugin.spark.authz.serde.HudiMergeIntoTargetTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.IdentifierTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.LogicalRelationTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.ResolvedDbObjectNameTableExtractor @@ -27,5 +29,3 @@ org.apache.kyuubi.plugin.spark.authz.serde.ResolvedTableTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.StringTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.TableIdentifierTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.TableTableExtractor -org.apache.kyuubi.plugin.spark.authz.serde.HudiDataSourceV2RelationTableExtractor -org.apache.kyuubi.plugin.spark.authz.serde.HudiMergeIntoTargetTableExtractor From 19497d12c2db6b048816333c8eb8423dc6670e73 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Fri, 20 Oct 2023 15:24:58 +0800 Subject: [PATCH 4/6] Update tableExtractors.scala --- .../spark/authz/serde/tableExtractors.scala | 45 ++++++++----------- 1 file changed, 18 insertions(+), 27 deletions(-) diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala index 3cf643018c1..47c486af360 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala +++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala @@ -20,15 +20,13 @@ package org.apache.kyuubi.plugin.spark.authz.serde import java.util.{Map => JMap} import scala.collection.JavaConverters._ -import scala.collection.mutable.ArrayBuffer import org.apache.spark.sql.SparkSession import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.catalog.CatalogTable import org.apache.spark.sql.catalyst.expressions.Expression -import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan +import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias} -import 
org.apache.kyuubi.plugin.spark.authz.{PrivilegeObject, PrivilegesBuilder} import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._ import org.apache.kyuubi.util.reflect.ReflectUtils._ @@ -82,7 +80,9 @@ class TableIdentifierTableExtractor extends TableExtractor { val catalogTable = spark.sessionState.catalog.getTableMetadata(identifier) Option(catalogTable.owner).filter(_.nonEmpty) } catch { - case _: Exception => None + case e: Exception => + e.printStackTrace() + None } Some(Table(None, identifier.database, identifier.table, owner)) } @@ -245,35 +245,26 @@ class TableTableExtractor extends TableExtractor { class HudiDataSourceV2RelationTableExtractor extends TableExtractor { override def apply(spark: SparkSession, v1: AnyRef): Option[Table] = { - val outputObjs = new ArrayBuffer[PrivilegeObject] - PrivilegesBuilder.buildQuery(invokeAs[LogicalPlan](v1, "table"), outputObjs, spark = spark) - if (outputObjs.isEmpty) { - None - } else { - Option(Table( - outputObjs.head.catalog, - Option(outputObjs.head.dbname), - outputObjs.head.objectName, - outputObjs.head.owner)) + invokeAs[LogicalPlan](v1, "table") match { + // Match multipartIdentifier with tableAlias + case SubqueryAlias(_, SubqueryAlias(identifier, _)) => + new StringTableExtractor().apply(spark, identifier.toString()) + // Match multipartIdentifier without tableAlias + case SubqueryAlias(identifier, _) => + new StringTableExtractor().apply(spark, identifier.toString()) } } } class HudiMergeIntoTargetTableExtractor extends TableExtractor { override def apply(spark: SparkSession, v1: AnyRef): Option[Table] = { - val outputObjs = new ArrayBuffer[PrivilegeObject] - PrivilegesBuilder.buildQuery( - invokeAs[LogicalPlan](v1, "targetTable"), - outputObjs, - spark = spark) - if (outputObjs.isEmpty) { - None - } else { - Option(Table( - outputObjs.head.catalog, - Option(outputObjs.head.dbname), - outputObjs.head.objectName, - outputObjs.head.owner)) + invokeAs[LogicalPlan](v1, "targetTable") match { + // Match multipartIdentifier with tableAlias + case SubqueryAlias(_, SubqueryAlias(identifier, relation)) => + new StringTableExtractor().apply(spark, identifier.toString()) + // Match multipartIdentifier without tableAlias + case SubqueryAlias(identifier, _) => + new StringTableExtractor().apply(spark, identifier.toString()) } } } From 08be589b72b0370b90869739f7b45fb04a642d65 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Fri, 20 Oct 2023 15:30:28 +0800 Subject: [PATCH 5/6] Update org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor --- .../org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor index 5fbca90908c..2406a40e196 100644 --- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor +++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor @@ -17,4 +17,4 @@ org.apache.kyuubi.plugin.spark.authz.serde.HudiMergeIntoSourceTableExtractor org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanOptionQueryExtractor -org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanQueryExtractor \ No newline at end of file 
+org.apache.kyuubi.plugin.spark.authz.serde.LogicalPlanQueryExtractor From 2598af20383548a632d430b3e1e4acd3da3c4c2c Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Fri, 20 Oct 2023 15:33:58 +0800 Subject: [PATCH 6/6] Update HudiCatalogRangerSparkExtensionSuite.scala --- .../authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala index b14076b2cfb..fd7acd1295e 100644 --- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala +++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala @@ -33,7 +33,7 @@ import org.apache.kyuubi.util.AssertionUtils.interceptContains */ @HudiTest class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { - override protected val catalogImpl: String = "hive" + override protected val catalogImpl: String = "in-memory" // TODO: Apache Hudi not support Spark 3.5 and Scala 2.13 yet, // should change after Apache Hudi support Spark 3.5 and Scala 2.13. private def isSupportedVersion = !isSparkV35OrGreater && !isScalaV213 @@ -447,11 +447,13 @@ class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { interceptContains[AccessControlException] { doAs(someone, sql(deleteFrom)) }(s"does not have [update] privilege on [$namespace1/$table1]") + doAs(admin, sql(deleteFrom)) val updateSql = s"UPDATE $namespace1.$table1 SET name = 'test' WHERE id > 10" interceptContains[AccessControlException] { doAs(someone, sql(updateSql)) }(s"does not have [update] privilege on [$namespace1/$table1]") + doAs(admin, sql(updateSql)) val mergeIntoSQL = s""" @@ -466,6 +468,7 @@ class HudiCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite { doAs(someone, sql(mergeIntoSQL)) }(s"does not have [select] privilege on " + s"[$namespace1/$table2/id,$namespace1/$table2/name,$namespace1/$table2/city]") + doAs(admin, sql(mergeIntoSQL)) } } }
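
For readers following PATCH 4: both new table extractors rely on the same trick. A Hudi DELETE/UPDATE/MERGE command exposes its (possibly aliased) target table as a SubqueryAlias tree, and the multipart identifier is read off the inner alias before being handed to StringTableExtractor. The snippet below is a minimal standalone sketch of that pattern match, assuming a Spark 3.x catalyst dependency on the classpath; the object name HudiTargetTablePattern and the helper targetIdentifier are illustrative only and are not part of the patch.

import org.apache.spark.sql.catalyst.AliasIdentifier
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation, SubqueryAlias}

object HudiTargetTablePattern {
  // Same shapes PATCH 4 matches on: an aliased target is a SubqueryAlias wrapping another
  // SubqueryAlias that carries the multipart identifier; an unaliased target is a single
  // SubqueryAlias. Anything else is left unresolved here rather than throwing a MatchError.
  def targetIdentifier(plan: LogicalPlan): Option[String] = plan match {
    case SubqueryAlias(_, SubqueryAlias(identifier, _)) => Some(identifier.toString())
    case SubqueryAlias(identifier, _) => Some(identifier.toString())
    case _ => None
  }

  def main(args: Array[String]): Unit = {
    val table = SubqueryAlias(AliasIdentifier("table1", Seq("default")), OneRowRelation())
    val aliased = SubqueryAlias("target", table)
    // Both forms resolve to the inner alias identifier (e.g. default.table1), which the
    // patch then passes to StringTableExtractor to build the Table privilege object.
    println(targetIdentifier(table))
    println(targetIdentifier(aliased))
  }
}

Resolving the name from the alias tree this way, rather than walking the plan with PrivilegesBuilder.buildQuery as the first revision in PATCH 1 did, keeps the extractors free of the output-object plumbing and mirrors how StringTableExtractor is already used elsewhere in tableExtractors.scala.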