Skip to content

Commit 8a67796

Browse files
committed
[KYUUBI #7256] Enable authZ compile support for Spark 4.0 and refactor some test methods
### Why are the changes needed? This PR enables authZ compile support for Spark 4.0 ``` build/mvn -Pspark-4.0 -Pscala-2.13 -pl extensions/spark/kyuubi-spark-authz -am install -DskipTests ``` ``` [ERROR] [Error] /Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala:19: object Strategy is not a member of package org.apache.spark.sql [ERROR] [Error] /Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala:23: not found: type Strategy [ERROR] [Error] /Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala:58: type mismatch; found : org.apache.kyuubi.plugin.spark.authz.rule.rowfilter.FilterDataSourceV2Strategy.type required: v1.StrategyBuilder (which expands to) org.apache.spark.sql.SparkSession => org.apache.spark.sql.execution.SparkStrategy [ERROR] three errors found ``` In addition, it refactors two methods in the test helper class `SparkSessionProvider` 1. Refactor `isCatalogSupportPurge` to an abstract method `supportPurge` because some UTs do not rely on the current catalog. 2. Add a new helper method `def doAs[T](user: String)(f: => T): T`, now the caller can use it ``` doAs("someone") { ... } ``` ### How was this patch tested? Pass GHA to ensure it breaks nothing, manually tested Spark 4.0 compile ``` build/mvn -Pspark-4.0 -Pscala-2.13 -pl extensions/spark/kyuubi-spark-authz -am install -DskipTests ``` ### Was this patch authored or co-authored using generative AI tooling? No. Closes #7256 from pan3793/authz-refactor. 
Closes #7256 b84cec8 [Cheng Pan] add missing override ede364f [Cheng Pan] Enable authZ compile support for Spark 4.0 and refactor some test methods Authored-by: Cheng Pan <[email protected]> Signed-off-by: Cheng Pan <[email protected]>
1 parent f2539d2 commit 8a67796

File tree

7 files changed

+24
-22
lines changed

7 files changed

+24
-22
lines changed

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,11 +16,12 @@
1616
*/
1717
package org.apache.kyuubi.plugin.spark.authz.rule.rowfilter
1818

19-
import org.apache.spark.sql.{SparkSession, Strategy}
19+
import org.apache.spark.sql.SparkSession
2020
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
2121
import org.apache.spark.sql.execution.SparkPlan
22+
import org.apache.spark.sql.execution.SparkStrategy
2223

23-
case class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
24+
case class FilterDataSourceV2Strategy(spark: SparkSession) extends SparkStrategy {
2425
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
2526
case ObjectFilterPlaceHolder(child) if child.nodeName == "ShowNamespaces" =>
2627
spark.sessionState.planner.plan(child)

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala

Lines changed: 14 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -27,11 +27,10 @@ import org.scalatest.Assertions._
2727

2828
import org.apache.kyuubi.Utils
2929
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
30-
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
31-
import org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
3230

3331
trait SparkSessionProvider {
3432
protected val catalogImpl: String
33+
protected def supportPurge: Boolean = true
3534
protected def format: String = if (catalogImpl == "hive") "hive" else "parquet"
3635

3736
protected val extension: SparkSessionExtensions => Unit = _ => ()
@@ -85,26 +84,29 @@ trait SparkSessionProvider {
8584

8685
protected val sql: String => DataFrame = spark.sql
8786

88-
protected def doAs[T](user: String, f: => T): T = {
87+
protected def doAs[T](user: String, f: => T): T = {
8988
UserGroupInformation.createRemoteUser(user).doAs[T](
9089
new PrivilegedExceptionAction[T] {
9190
override def run(): T = f
9291
})
9392
}
93+
94+
protected def doAs[T](user: String)(f: => T): T = {
95+
UserGroupInformation.createRemoteUser(user).doAs[T](
96+
new PrivilegedExceptionAction[T] {
97+
override def run(): T = f
98+
})
99+
}
100+
94101
protected def withCleanTmpResources[T](res: Seq[(String, String)])(f: => T): T = {
95102
try {
96103
f
97104
} finally {
98105
res.foreach {
99-
case (t, "table") => doAs(
100-
admin, {
101-
val purgeOption =
102-
if (isCatalogSupportPurge(
103-
spark.sessionState.catalogManager.currentCatalog.name())) {
104-
"PURGE"
105-
} else ""
106-
sql(s"DROP TABLE IF EXISTS $t $purgeOption")
107-
})
106+
case (t, "table") => doAs(admin) {
107+
val purgeOption = if (supportPurge) "PURGE" else ""
108+
sql(s"DROP TABLE IF EXISTS $t $purgeOption")
109+
}
108110
case (db, "database") => doAs(admin, sql(s"DROP DATABASE IF EXISTS $db"))
109111
case (fn, "function") => doAs(admin, sql(s"DROP FUNCTION IF EXISTS $fn"))
110112
case (view, "view") => doAs(admin, sql(s"DROP VIEW IF EXISTS $view"))
@@ -118,12 +120,4 @@ trait SparkSessionProvider {
118120
protected def checkAnswer(user: String, query: String, result: Seq[Row]): Unit = {
119121
doAs(user, assert(sql(query).collect() === result))
120122
}
121-
122-
private def isCatalogSupportPurge(catalogName: String): Boolean = {
123-
val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName, deltaCatalogClassName)
124-
spark.conf.getOption(s"spark.sql.catalog.$catalogName") match {
125-
case Some(catalog) if !unsupportedCatalogs.contains(catalog) => true
126-
case _ => false
127-
}
128-
}
129123
}

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ import org.apache.kyuubi.util.AssertionUtils._
3535
@DeltaTest
3636
class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
3737
override protected val catalogImpl: String = "hive"
38+
override protected val supportPurge: Boolean = false
3839
override protected val sqlExtensions: String = "io.delta.sql.DeltaSparkSessionExtension"
3940

4041
val namespace1 = deltaNamespace

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ import org.apache.kyuubi.util.AssertionUtils._
3131
@PaimonTest
3232
class PaimonCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
3333
override protected val catalogImpl: String = "hive"
34+
override protected val supportPurge: Boolean = false
3435
private def isSupportedVersion = isScalaV212
3536
override protected val sqlExtensions: String =
3637
if (isSupportedVersion) "org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions"

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
3333
*/
3434
class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
3535
override protected val catalogImpl: String = "in-memory"
36+
override protected val supportPurge: Boolean = false
3637

3738
val catalogV2 = "testcat"
3839
val jdbcCatalogV2 = "jdbc2"

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,8 @@ class DataMaskingForJDBCV2Suite extends DataMaskingTestBase {
3737

3838
override protected val catalogImpl: String = "in-memory"
3939

40+
override protected val supportPurge: Boolean = false
41+
4042
override protected def format: String = ""
4143

4244
override def beforeAll(): Unit = {

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,8 @@ class RowFilteringForJDBCV2Suite extends RowFilteringTestBase {
3838

3939
override protected val catalogImpl: String = "in-memory"
4040

41+
override protected val supportPurge: Boolean = false
42+
4143
override protected def format: String = ""
4244

4345
override def beforeAll(): Unit = {

0 commit comments

Comments
 (0)