diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index 5c496b8744b..a0ed5fb6a14 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -28,6 +28,7 @@ import org.apache.kyuubi.plugin.spark.authz.OperationType.OperationType
 import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
+import org.apache.kyuubi.plugin.spark.authz.util.PermanentViewMarker
 import org.apache.kyuubi.util.reflect.ReflectUtils._
 
 object PrivilegesBuilder {
@@ -102,6 +103,11 @@
         val cols = conditionList ++ aggCols
         buildQuery(a.child, privilegeObjects, projectionList, cols, spark)
 
+      case pvm: PermanentViewMarker =>
+        getScanSpec(pvm).tables(pvm, spark).foreach { table =>
+          privilegeObjects += PrivilegeObject(table, pvm.visitColNames)
+        }
+
       case scan if isKnownScan(scan) && scan.resolved =>
         getScanSpec(scan).tables(scan, spark).foreach(mergeProjection(_, scan))
 
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleApplyPermanentViewMarker.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleApplyPermanentViewMarker.scala
index 679b5d65dfe..909cd9e93d3 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleApplyPermanentViewMarker.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleApplyPermanentViewMarker.scala
@@ -39,12 +39,16 @@ class RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {
       case permanentView: View if hasResolvedPermanentView(permanentView) =>
         val resolvedSubquery = permanentView.transformAllExpressions {
           case subquery: SubqueryExpression =>
-            // TODO: Currently, we do not do an auth check in the subquery
-            // as the main query part also secures it. But for performance consideration,
-            // we also pre-check it in subqueries and fail fast with negative privileges.
-            subquery.withNewPlan(plan = PermanentViewMarker(subquery.plan, null))
+            subquery.withNewPlan(plan =
+              PermanentViewMarker(
+                subquery.plan,
+                permanentView.desc,
+                permanentView.output.map(_.name)))
         }
-        PermanentViewMarker(resolvedSubquery, resolvedSubquery.desc)
+        PermanentViewMarker(
+          resolvedSubquery,
+          resolvedSubquery.desc,
+          resolvedSubquery.output.map(_.name))
       case other => apply(other)
     }
   }
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/PermanentViewMarker.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/PermanentViewMarker.scala
index 69b55e0fc74..d19f7a92314 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/PermanentViewMarker.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/PermanentViewMarker.scala
@@ -21,7 +21,10 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTable
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, UnaryNode}
 
-case class PermanentViewMarker(child: LogicalPlan, catalogTable: CatalogTable) extends UnaryNode
+case class PermanentViewMarker(
+    child: LogicalPlan,
+    catalogTable: CatalogTable,
+    visitColNames: Seq[String]) extends UnaryNode
   with WithInternalChild {
 
   override def output: Seq[Attribute] = child.output
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index e4e3014f50a..532a114360b 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -837,6 +837,51 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
     }
   }
 
+  test("[KYUUBI #5475] Check permanent view's subquery should check view's correct privilege") {
+    val db1 = defaultDb
+    val table1 = "table1"
+    val table2 = "table2"
+    val view1 = "view1"
+    withSingleCallEnabled {
+      withCleanTmpResources(
+        Seq((s"$db1.$table1", "table"), (s"$db1.$table2", "table"), (s"$db1.$view1", "view"))) {
+        doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table1(id int, scope int)"))
+        doAs(
+          admin,
+          sql(
+            s"""
+               | CREATE TABLE IF NOT EXISTS $db1.$table2(
+               | id int,
+               | name string,
+               | age int,
+               | scope int)
+               | """.stripMargin))
+        doAs(
+          admin,
+          sql(
+            s"""
+               |CREATE VIEW $db1.$view1
+               |AS
+               |WITH temp AS (
+               | SELECT max(scope) max_scope
+               | FROM $db1.$table1)
+               |SELECT id, name, max(scope) as max_scope, sum(age) sum_age
+               |FROM $db1.$table2
+               |WHERE scope in (SELECT max_scope FROM temp)
+               |GROUP BY id, name
+               |""".stripMargin))
+        // Will just check permanent view privilege.
+        val e2 = intercept[AccessControlException](
+          doAs(
+            someone,
+            sql(s"SELECT id as new_id, name, max_scope FROM $db1.$view1".stripMargin).show()))
+        assert(e2.getMessage.contains(
+          s"does not have [select] privilege on " +
+            s"[$db1/$view1/id,$db1/$view1/name,$db1/$view1/max_scope,$db1/$view1/sum_age]"))
+      }
+    }
+  }
+
   test("[KYUUBI #5492] saveAsTable create DataSource table miss db info") {
     val table1 = "table1"
     withSingleCallEnabled {