.github/workflows/master.yml (10 additions, 0 deletions)
@@ -64,6 +64,11 @@ jobs:
spark-archive: '-Pscala-2.13'
exclude-tags: ''
comment: 'normal'
- java: 21
spark: '4.1'
spark-archive: '-Pscala-2.13'
exclude-tags: ''
comment: 'normal'
- java: 8
spark: '3.5'
spark-archive: '-Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-3.3.3 -Dspark.archive.name=spark-3.3.3-bin-hadoop3.tgz -Pzookeeper-3.6'
@@ -79,6 +84,11 @@ jobs:
spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-4.0.1 -Dspark.archive.name=spark-4.0.1-bin-hadoop3.tgz'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-4.0-binary'
- java: 17
spark: '3.5'
spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-4.1.0-preview4 -Dspark.archive.name=spark-4.1.0-preview4-bin-hadoop3.tgz'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-4.1-binary'
env:
SPARK_LOCAL_IP: localhost
steps:
@@ -17,23 +17,28 @@

package org.apache.kyuubi.engine.spark.operation

import java.lang.{Boolean => JBoolean}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.spark.kyuubi.SparkUtilsHelper
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.TreeNode
import org.apache.spark.sql.execution.CommandExecutionMode
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.StructType

import org.apache.kyuubi.KyuubiSQLException
import org.apache.kyuubi.config.KyuubiConf.{LINEAGE_PARSER_PLUGIN_PROVIDER, OPERATION_PLAN_ONLY_EXCLUDES, OPERATION_PLAN_ONLY_OUT_STYLE}
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.getSessionConf
import org.apache.kyuubi.engine.spark.operation.PlanOnlyStatement._
import org.apache.kyuubi.operation.{AnalyzeMode, ArrayFetchIterator, ExecutionMode, IterableFetchIterator, JsonStyle, LineageMode, OperationHandle, OptimizeMode, OptimizeWithStatsMode, ParseMode, PhysicalMode, PlainStyle, PlanOnlyMode, PlanOnlyStyle, UnknownMode, UnknownStyle}
import org.apache.kyuubi.operation.PlanOnlyMode.{notSupportedModeError, unknownModeError}
import org.apache.kyuubi.operation.PlanOnlyStyle.{notSupportedStyleError, unknownStyleError}
import org.apache.kyuubi.operation.log.OperationLog
import org.apache.kyuubi.session.Session
import org.apache.kyuubi.util.reflect.DynMethods

/**
* Perform the statement parsing, analyzing or optimizing only without executing it
@@ -110,11 +115,8 @@ class PlanOnlyStatement(
spark.sessionState.analyzer.checkAnalysis(analyzed)
val optimized = spark.sessionState.optimizer.execute(analyzed)
optimized.stats
iter = new IterableFetchIterator(Seq(Row(optimized.treeString(
verbose = true,
addSuffix = true,
SQLConf.get.maxToStringFields,
printOperatorId = false))))
iter = new IterableFetchIterator(
Seq(Row(treeString(optimized, verbose = true, addSuffix = true))))
case PhysicalMode =>
val physical = spark.sessionState.executePlan(plan, CommandExecutionMode.SKIP).sparkPlan
iter = new IterableFetchIterator(Seq(Row(physical.toString())))
@@ -184,3 +186,33 @@ class PlanOnlyStatement(
}

}

object PlanOnlyStatement {

private val unboundTreeStringMethod = DynMethods.builder("treeString")
.impl( // SPARK-52065 (4.1.0)
classOf[TreeNode[_]],
classOf[Boolean],
classOf[Boolean],
classOf[Int],
classOf[Boolean],
classOf[Boolean])
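    // Fallback for Spark releases before 4.1, whose TreeNode.treeString has no printOutputColumns parameter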
.impl(
classOf[TreeNode[_]],
classOf[Boolean],
classOf[Boolean],
classOf[Int],
classOf[Boolean])
.build()

def treeString(
tree: TreeNode[_],
verbose: JBoolean,
addSuffix: JBoolean = false,
maxFields: Integer = SQLConf.get.maxToStringFields,
printOperatorId: JBoolean = false,
printOutputColumns: JBoolean = false): String = {
unboundTreeStringMethod.bind(tree)
.invoke(verbose, addSuffix, maxFields, printOperatorId, printOutputColumns)
}
}
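
For context, the first impl above binds the Spark 4.1 treeString overload added by SPARK-52065, which introduces a printOutputColumns flag; the second matches the older four-argument signature, so the wrapper keeps working against Spark 3.x and 4.0 binaries. A minimal usage sketch, assuming a local SparkSession (the query text below is hypothetical, shown only to illustrate the call):

import org.apache.spark.sql.SparkSession

// Sketch only: a throwaway local session; inside the engine, the session bound to the operation is used.
val spark = SparkSession.builder().master("local[1]").appName("treeString-demo").getOrCreate()
val optimized = spark.sql("SELECT 1 AS id").queryExecution.optimizedPlan
// Resolves to whichever treeString overload the running Spark version provides.
println(PlanOnlyStatement.treeString(optimized, verbose = true, addSuffix = true))
spark.stop()
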
@@ -23,6 +23,7 @@ import org.apache.spark.SparkConf
import org.apache.spark.util.Utils

import org.apache.kyuubi.Logging
import org.apache.kyuubi.util.reflect.{DynClasses, DynMethods}

/**
* A place to invoke non-public APIs of [[Utils]], anything to be added here needs to
@@ -37,11 +38,21 @@ object SparkUtilsHelper extends Logging {
Utils.redact(regex, text)
}

private val readOnlySparkConfCls = DynClasses.builder()
.impl("org.apache.spark.ReadOnlySparkConf")
.orNull()
.build()

private val getLocalDirMethod = DynMethods.builder("getLocalDir")
.impl(Utils.getClass, readOnlySparkConfCls) // SPARK-53459 (4.1.0)
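    // Fallback for Spark releases before 4.1, where Utils.getLocalDir takes a concrete SparkConf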
.impl(Utils.getClass, classOf[SparkConf])
.build(Utils)

/**
* Get the path of a temporary directory.
*/
def getLocalDir(conf: SparkConf): String = {
Utils.getLocalDir(conf)
getLocalDirMethod.invoke(conf)
}

def classesArePresent(className: String): Boolean = {
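
Similarly, SparkUtilsHelper.getLocalDir now resolves its target reflectively: on Spark 4.1 the SPARK-53459 overload of Utils.getLocalDir takes the new ReadOnlySparkConf, while earlier releases expose the SparkConf overload, and the .orNull() builder option keeps the class lookup from failing where ReadOnlySparkConf does not exist. A small usage sketch (the spark.local.dir value is hypothetical):

import org.apache.spark.SparkConf
import org.apache.spark.kyuubi.SparkUtilsHelper

// Sketch only: callers keep passing a plain SparkConf; the reflective binding
// picks whichever getLocalDir overload the running Spark version exposes.
val conf = new SparkConf(loadDefaults = false).set("spark.local.dir", "/tmp/kyuubi-demo")
val localDir: String = SparkUtilsHelper.getLocalDir(conf)
println(localDir)
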
pom.xml (27 additions, 6 deletions)
@@ -239,7 +239,7 @@
<maven.plugin.build.helper.version>3.3.0</maven.plugin.build.helper.version>
<maven.plugin.download.version>1.11.3</maven.plugin.download.version>
<maven.plugin.download.cache.path></maven.plugin.download.cache.path>
<maven.plugin.enforcer.mojo.rules.version>1.8.0</maven.plugin.enforcer.mojo.rules.version>
<maven.plugin.enforcer.mojo.rules.version>1.11.0</maven.plugin.enforcer.mojo.rules.version>
<maven.plugin.flatten.version>1.6.0</maven.plugin.flatten.version>
<maven.plugin.frontend.version>1.12.1</maven.plugin.frontend.version>
<maven.plugin.frontend.inheritsProxyConfigFromMaven>false</maven.plugin.frontend.inheritsProxyConfigFromMaven>
@@ -1761,11 +1761,9 @@
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<numUnapprovedLicenses>0</numUnapprovedLicenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<approvedLicenses>
<id>AL2.0</id>
</approvedLicenses>
<excludesFile>.rat-excludes</excludesFile>
</configuration>
<executions>
@@ -2058,6 +2056,29 @@
</properties>
</profile>

<profile>
<id>spark-4.1</id>
<modules>
<module>extensions/spark/kyuubi-spark-connector-hive</module>
</modules>
<properties>
<maven.compiler.release>17</maven.compiler.release>
<enforcer.maxJdkVersion>17</enforcer.maxJdkVersion>
<spark.version>4.1.0-preview4</spark.version>
<spark.binary.version>4.0</spark.binary.version>
<antlr4.version>4.13.1</antlr4.version>
<delta.version>4.0.0</delta.version>
<delta.artifact>delta-spark_${scala.binary.version}</delta.artifact>
<!-- TODO: update once Hudi supports Spark 4.0 -->
<hudi.artifact>hudi-spark3.5-bundle_${scala.binary.version}</hudi.artifact>
<!-- TODO: update once Paimon supports Spark 4.0.
paimon-spark-3.5 contains Scala 2.12 classes that conflict with Scala 2.13 -->
<paimon.artifact>paimon-common</paimon.artifact>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
<spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
</properties>
</profile>

<profile>
<id>spark-master</id>
<properties>