diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
index e78da29df0d..f0038e75b5f 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
@@ -16,4 +16,5 @@
 #
 
 org.apache.kyuubi.plugin.spark.authz.serde.CatalogStorageFormatURIExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.OptionsUriExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index c2368c2f471..581f33506ad 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1344,7 +1344,11 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "options",
+    "fieldExtractor" : "OptionsUriExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand",
   "tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
index 7feca151122..418c5a0a084 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
@@ -41,3 +41,9 @@ class CatalogStorageFormatURIExtractor extends URIExtractor {
     v1.asInstanceOf[CatalogStorageFormat].locationUri.map(uri => Uri(uri.getPath)).toSeq
   }
 }
+
+class OptionsUriExtractor extends URIExtractor {
+  override def apply(v1: AnyRef): Seq[Uri] = {
+    v1.asInstanceOf[Map[String, String]].get("path").map(Uri).toSeq
+  }
+}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index b69f08bcf0e..7e42b02bc74 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -559,7 +559,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
   val SaveIntoDataSourceCommand = {
     val cmd = "org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand"
     val queryDesc = queryQueryDesc
-    TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc))
+    val uriDesc = UriDesc("options", classOf[OptionsUriExtractor])
+    TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc))
   }
 
   val InsertIntoHadoopFsRelationCommand = {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index e39a953de6d..1f1b42b0feb 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1085,4 +1085,15 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       }
     }
   }
+  test("SaveIntoDataSourceCommand") {
+    withTempDir { path =>
+      withSingleCallEnabled {
+        val df = sql("SELECT 1 as id, 'Tony' as name")
+        interceptContains[AccessControlException](doAs(
+          someone,
+          df.write.format("console").save(path.toString)))(
+          s"does not have [select] privilege on [[$path, $path/]]")
+      }
+    }
+  }
 }