diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 586db9c8ec3..7c442dd0f48 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -170,16 +170,18 @@ jobs:
**/target/unit-tests.log
**/kyuubi-spark-sql-engine.log*
- scala213:
- name: Scala Compilation Test
+ scala-test:
+ name: Scala Test
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
- java:
- - '8'
scala:
- '2.13'
+ java:
+ - '8'
+ spark:
+ - '3.4'
steps:
- uses: actions/checkout@v3
- name: Tune Runner VM
@@ -193,14 +195,24 @@ jobs:
check-latest: false
- name: Setup Maven
uses: ./.github/actions/setup-maven
+ - name: Cache Engine Archives
+ uses: ./.github/actions/cache-engine-archives
- name: Build on Scala ${{ matrix.scala }}
run: |
- MODULES='!externals/kyuubi-flink-sql-engine'
- ./build/mvn clean install -pl ${MODULES} -am \
- -DskipTests -Pflink-provided,hive-provided,spark-provided \
- -Pjava-${{ matrix.java }} \
- -Pscala-${{ matrix.scala }} \
- -Pspark-3.3
+ TEST_MODULES="!externals/kyuubi-flink-sql-engine,!integration-tests/kyuubi-flink-it"
+ ./build/mvn clean install ${MVN_OPT} -pl ${TEST_MODULES} -am \
+ -Pscala-${{ matrix.scala }} -Pjava-${{ matrix.java }} -Pspark-${{ matrix.spark }}
+ - name: Upload test logs
+ if: failure()
+ uses: actions/upload-artifact@v3
+ with:
+ name: unit-tests-log-scala-${{ matrix.scala }}-java-${{ matrix.java }}-spark-${{ matrix.spark }}
+ path: |
+ **/target/unit-tests.log
+ **/kyuubi-spark-sql-engine.log*
+ **/kyuubi-spark-batch-submit.log*
+ **/kyuubi-jdbc-engine.log*
+ **/kyuubi-hive-sql-engine.log*
flink-it:
name: Flink Test
diff --git a/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/Lineage.scala b/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/Lineage.scala
index 4bd0bd0b168..730deeb01e2 100644
--- a/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/Lineage.scala
+++ b/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/Lineage.scala
@@ -32,8 +32,9 @@ class Lineage(
override def equals(other: Any): Boolean = other match {
case otherLineage: Lineage =>
- otherLineage.inputTables == inputTables && otherLineage.outputTables == outputTables &&
- otherLineage.columnLineage == columnLineage
+ otherLineage.inputTables.toSet == inputTables.toSet &&
+ otherLineage.outputTables.toSet == outputTables.toSet &&
+ otherLineage.columnLineage.toSet == columnLineage.toSet
case _ => false
}
diff --git a/integration-tests/kyuubi-flink-it/pom.xml b/integration-tests/kyuubi-flink-it/pom.xml
index 3c0e3f31a7c..15699be1d8a 100644
--- a/integration-tests/kyuubi-flink-it/pom.xml
+++ b/integration-tests/kyuubi-flink-it/pom.xml
@@ -112,4 +112,8 @@
+    <build>
+        <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
+        <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+    </build>
diff --git a/integration-tests/kyuubi-hive-it/pom.xml b/integration-tests/kyuubi-hive-it/pom.xml
index 24e5529a2d3..c4e9f320c95 100644
--- a/integration-tests/kyuubi-hive-it/pom.xml
+++ b/integration-tests/kyuubi-hive-it/pom.xml
@@ -69,4 +69,9 @@
test
+
+    <build>
+        <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
+        <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+    </build>
diff --git a/integration-tests/kyuubi-jdbc-it/pom.xml b/integration-tests/kyuubi-jdbc-it/pom.xml
index 08f74512e90..95ffd2038c1 100644
--- a/integration-tests/kyuubi-jdbc-it/pom.xml
+++ b/integration-tests/kyuubi-jdbc-it/pom.xml
@@ -114,5 +114,7 @@
+        <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
+        <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
diff --git a/integration-tests/kyuubi-trino-it/pom.xml b/integration-tests/kyuubi-trino-it/pom.xml
index 628f63818b9..c93d43c005b 100644
--- a/integration-tests/kyuubi-trino-it/pom.xml
+++ b/integration-tests/kyuubi-trino-it/pom.xml
@@ -88,4 +88,9 @@
+
+    <build>
+        <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
+        <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+    </build>
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
index e3bb4ccb730..99482f0c5ff 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
@@ -17,8 +17,11 @@
package org.apache.kyuubi.operation
+import scala.collection.mutable.ListBuffer
+
import org.apache.kyuubi.{IcebergSuiteMixin, SPARK_COMPILE_VERSION}
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.SparkVersionUtil
trait IcebergMetadataTests extends HiveJDBCTestHelper with IcebergSuiteMixin with SparkVersionUtil {
@@ -27,10 +30,11 @@ trait IcebergMetadataTests extends HiveJDBCTestHelper with IcebergSuiteMixin wit
withJdbcStatement() { statement =>
val metaData = statement.getConnection.getMetaData
val catalogs = metaData.getCatalogs
- catalogs.next()
- assert(catalogs.getString(TABLE_CAT) === "spark_catalog")
- catalogs.next()
- assert(catalogs.getString(TABLE_CAT) === catalog)
+ val results = ListBuffer[String]()
+ while (catalogs.next()) {
+ results += catalogs.getString(TABLE_CAT)
+ }
+ assertContains(results, "spark_catalog", catalog)
}
}
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
index 20d3f6fad5b..0ac56e3bcf0 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
@@ -270,7 +270,7 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
|""".stripMargin
val rs1 = statement.executeQuery(code)
rs1.next()
- assert(rs1.getString(1) startsWith "df: org.apache.spark.sql.DataFrame")
+ assert(rs1.getString(1) contains "df: org.apache.spark.sql.DataFrame")
// continue
val rs2 = statement.executeQuery("df.count()")
@@ -311,7 +311,7 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
|""".stripMargin
val rs5 = statement.executeQuery(code2)
rs5.next()
- assert(rs5.getString(1) startsWith "df: org.apache.spark.sql.DataFrame")
+ assert(rs5.getString(1) contains "df: org.apache.spark.sql.DataFrame")
// re-assign
val rs6 = statement.executeQuery("result.set(df)")
@@ -420,7 +420,7 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
statement.execute(code1)
val rs = statement.executeQuery(code2)
rs.next()
- assert(rs.getString(1) == "x: Int = 3")
+ assert(rs.getString(1) contains "x: Int = 3")
}
}
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
index 44b317c71ea..84807a62d87 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
@@ -17,7 +17,7 @@
package org.apache.kyuubi.engine
-import java.io.{File, FilenameFilter, IOException}
+import java.io.{File, FileFilter, IOException}
import java.net.URI
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}
@@ -56,13 +56,14 @@ trait ProcBuilder {
}
}
+ protected val engineScalaBinaryVersion: String = SCALA_COMPILE_VERSION
+
/**
* The engine jar or other runnable jar containing the main method
*/
def mainResource: Option[String] = {
// 1. get the main resource jar for user specified config first
- // TODO use SPARK_SCALA_VERSION instead of SCALA_COMPILE_VERSION
- val jarName = s"${module}_$SCALA_COMPILE_VERSION-$KYUUBI_VERSION.jar"
+ val jarName: String = s"${module}_$engineScalaBinaryVersion-$KYUUBI_VERSION.jar"
conf.getOption(s"kyuubi.session.engine.$shortName.main.resource").filter { userSpecified =>
// skip check exist if not local file.
val uri = new URI(userSpecified)
@@ -295,6 +296,11 @@ trait ProcBuilder {
}
}
+ protected lazy val engineHomeDirFilter: FileFilter = file => {
+ val fileName = file.getName
+ file.isDirectory && fileName.contains(s"$shortName-") && !fileName.contains("-engine")
+ }
+
/**
* Get the home directly that contains binary distributions of engines.
*
@@ -311,9 +317,6 @@ trait ProcBuilder {
* @return SPARK_HOME, HIVE_HOME, etc.
*/
protected def getEngineHome(shortName: String): String = {
- val homeDirFilter: FilenameFilter = (dir: File, name: String) =>
- dir.isDirectory && name.contains(s"$shortName-") && !name.contains("-engine")
-
val homeKey = s"${shortName.toUpperCase}_HOME"
// 1. get from env, e.g. SPARK_HOME, FLINK_HOME
env.get(homeKey)
@@ -321,14 +324,14 @@ trait ProcBuilder {
// 2. get from $KYUUBI_HOME/externals/kyuubi-download/target
env.get(KYUUBI_HOME).flatMap { p =>
val candidates = Paths.get(p, "externals", "kyuubi-download", "target")
- .toFile.listFiles(homeDirFilter)
+ .toFile.listFiles(engineHomeDirFilter)
if (candidates == null) None else candidates.map(_.toPath).headOption
}.filter(Files.exists(_)).map(_.toAbsolutePath.toFile.getCanonicalPath)
}.orElse {
// 3. get from kyuubi-server/../externals/kyuubi-download/target
Utils.getCodeSourceLocation(getClass).split("kyuubi-server").flatMap { cwd =>
val candidates = Paths.get(cwd, "externals", "kyuubi-download", "target")
- .toFile.listFiles(homeDirFilter)
+ .toFile.listFiles(engineHomeDirFilter)
if (candidates == null) None else candidates.map(_.toPath).headOption
}.find(Files.exists(_)).map(_.toAbsolutePath.toFile.getCanonicalPath)
} match {
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
index 351eddb7567..02f4064afc6 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
@@ -17,7 +17,7 @@
package org.apache.kyuubi.engine.spark
-import java.io.{File, IOException}
+import java.io.{File, FileFilter, IOException}
import java.nio.file.Paths
import java.util.Locale
@@ -25,6 +25,7 @@ import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import com.google.common.annotations.VisibleForTesting
+import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.security.UserGroupInformation
import org.apache.kyuubi._
@@ -35,8 +36,7 @@ import org.apache.kyuubi.engine.ProcBuilder.KYUUBI_ENGINE_LOG_PATH_KEY
import org.apache.kyuubi.ha.HighAvailabilityConf
import org.apache.kyuubi.ha.client.AuthTypes
import org.apache.kyuubi.operation.log.OperationLog
-import org.apache.kyuubi.util.KubernetesUtils
-import org.apache.kyuubi.util.Validator
+import org.apache.kyuubi.util.{KubernetesUtils, Validator}
class SparkProcessBuilder(
override val proxyUser: String,
@@ -102,6 +102,25 @@ class SparkProcessBuilder(
}
}
+ private[kyuubi] def extractSparkCoreScalaVersion(fileNames: Iterable[String]): String = {
+ fileNames.collectFirst { case SPARK_CORE_SCALA_VERSION_REGEX(scalaVersion) => scalaVersion }
+ .getOrElse(throw new KyuubiException("Failed to extract Scala version from spark-core jar"))
+ }
+
+ override protected val engineScalaBinaryVersion: String = {
+ val sparkCoreScalaVersion =
+ extractSparkCoreScalaVersion(Paths.get(sparkHome, "jars").toFile.list())
+ StringUtils.defaultIfBlank(System.getenv("SPARK_SCALA_VERSION"), sparkCoreScalaVersion)
+ }
+
+ override protected lazy val engineHomeDirFilter: FileFilter = file => {
+ val r = SCALA_COMPILE_VERSION match {
+ case "2.12" => SPARK_HOME_REGEX_SCALA_212
+ case "2.13" => SPARK_HOME_REGEX_SCALA_213
+ }
+ file.isDirectory && r.findFirstMatchIn(file.getName).isDefined
+ }
+
override protected lazy val commands: Array[String] = {
// complete `spark.master` if absent on kubernetes
completeMasterUrl(conf)
@@ -314,4 +333,13 @@ object SparkProcessBuilder {
final private val SPARK_SUBMIT_FILE = if (Utils.isWindows) "spark-submit.cmd" else "spark-submit"
final private val SPARK_CONF_DIR = "SPARK_CONF_DIR"
final private val SPARK_CONF_FILE_NAME = "spark-defaults.conf"
+
+ final private[kyuubi] val SPARK_CORE_SCALA_VERSION_REGEX =
+ """^spark-core_(\d\.\d+).*\.jar$""".r
+
+ final private[kyuubi] val SPARK_HOME_REGEX_SCALA_212 =
+ """^spark-\d+\.\d+\.\d+-bin-hadoop\d+(\.\d+)?$""".r
+
+ final private[kyuubi] val SPARK_HOME_REGEX_SCALA_213 =
+ """^spark-\d+\.\d+\.\d+-bin-hadoop\d+(\.\d+)?-scala\d+(\.\d+)?$""".r
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
index a4227d26e74..408f42f6404 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
@@ -26,7 +26,7 @@ import java.util.concurrent.{Executors, TimeUnit}
import org.scalatest.time.SpanSugar._
import org.scalatestplus.mockito.MockitoSugar
-import org.apache.kyuubi.{KerberizedTestHelper, KyuubiSQLException, Utils}
+import org.apache.kyuubi._
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf.{ENGINE_LOG_TIMEOUT, ENGINE_SPARK_MAIN_RESOURCE}
import org.apache.kyuubi.engine.ProcBuilder.KYUUBI_ENGINE_LOG_PATH_KEY
@@ -34,6 +34,7 @@ import org.apache.kyuubi.engine.spark.SparkProcessBuilder._
import org.apache.kyuubi.ha.HighAvailabilityConf
import org.apache.kyuubi.ha.client.AuthTypes
import org.apache.kyuubi.service.ServiceUtils
+import org.apache.kyuubi.util.AssertionUtils._
class SparkProcessBuilderSuite extends KerberizedTestHelper with MockitoSugar {
private def conf = KyuubiConf().set("kyuubi.on", "off")
@@ -363,6 +364,46 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper with MockitoSugar {
.appendPodNameConf(conf3).get(KUBERNETES_EXECUTOR_POD_NAME_PREFIX)
assert(execPodNamePrefix3 === Some(s"kyuubi-$engineRefId"))
}
+
+ test("extract spark core scala version") {
+ val builder = new SparkProcessBuilder("kentyao", KyuubiConf(false))
+ Seq(
+ "spark-core_2.13-3.4.1.jar",
+ "spark-core_2.13-3.5.0-abc-20230921.jar",
+ "spark-core_2.13-3.5.0-xyz-1.2.3.jar",
+ "spark-core_2.13-3.5.0.1.jar",
+ "spark-core_2.13.2-3.5.0.jar").foreach { f =>
+ assertResult("2.13")(builder.extractSparkCoreScalaVersion(Seq(f)))
+ }
+
+ Seq(
+ "spark-dummy_2.13-3.5.0.jar",
+ "spark-core_2.13-3.5.0.1.zip",
+ "yummy-spark-core_2.13-3.5.0.jar").foreach { f =>
+ assertThrows[KyuubiException](builder.extractSparkCoreScalaVersion(Seq(f)))
+ }
+ }
+
+ test("match scala version of spark home") {
+ SCALA_COMPILE_VERSION match {
+ case "2.12" => Seq(
+ "spark-3.2.4-bin-hadoop3.2",
+ "spark-3.2.4-bin-hadoop2.7",
+ "spark-3.4.1-bin-hadoop3")
+ .foreach { sparkHome =>
+ assertMatches(sparkHome, SPARK_HOME_REGEX_SCALA_212)
+ assertNotMatches(sparkHome, SPARK_HOME_REGEX_SCALA_213)
+ }
+ case "2.13" => Seq(
+ "spark-3.2.4-bin-hadoop3.2-scala2.13",
+ "spark-3.4.1-bin-hadoop3-scala2.13",
+ "spark-3.5.0-bin-hadoop3-scala2.13")
+ .foreach { sparkHome =>
+ assertMatches(sparkHome, SPARK_HOME_REGEX_SCALA_213)
+ assertNotMatches(sparkHome, SPARK_HOME_REGEX_SCALA_212)
+ }
+ }
+ }
}
class FakeSparkProcessBuilder(config: KyuubiConf)
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionSigningSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionSigningSuite.scala
index a53e4650727..11121a74fb0 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionSigningSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionSigningSuite.scala
@@ -86,8 +86,9 @@ class SessionSigningSuite extends WithKyuubiServer with HiveJDBCTestHelper {
assert(rs2.next())
// skipping prefix "res0: String = " of returned scala result
- val publicKeyStr = rs1.getString(1).substring(15)
- val sessionUserSign = rs2.getString(1).substring(15)
+ val sep = " = "
+ val publicKeyStr = StringUtils.substringAfter(rs1.getString(1), sep)
+ val sessionUserSign = StringUtils.substringAfter(rs2.getString(1), sep)
assert(StringUtils.isNotBlank(publicKeyStr))
assert(StringUtils.isNotBlank(sessionUserSign))
diff --git a/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala b/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala
index 2e9a0d3fe2d..9d33993b9d2 100644
--- a/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala
+++ b/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala
@@ -23,14 +23,16 @@ import java.util.Locale
import scala.collection.Traversable
import scala.io.Source
import scala.reflect.ClassTag
+import scala.util.matching.Regex
-import org.scalactic.{source, Prettifier}
+import org.scalactic.Prettifier
+import org.scalactic.source.Position
import org.scalatest.Assertions._
object AssertionUtils {
def assertEqualsIgnoreCase(expected: AnyRef)(actual: AnyRef)(
- implicit pos: source.Position): Unit = {
+ implicit pos: Position): Unit = {
val isEqualsIgnoreCase = (Option(expected), Option(actual)) match {
case (Some(expectedStr: String), Some(actualStr: String)) =>
expectedStr.equalsIgnoreCase(actualStr)
@@ -44,15 +46,15 @@ object AssertionUtils {
}
}
- def assertStartsWithIgnoreCase(expectedPrefix: String)(actual: String)(
- implicit pos: source.Position): Unit = {
+ def assertStartsWithIgnoreCase(expectedPrefix: String)(actual: String)(implicit
+ pos: Position): Unit = {
if (!actual.toLowerCase(Locale.ROOT).startsWith(expectedPrefix.toLowerCase(Locale.ROOT))) {
fail(s"Expected starting with '$expectedPrefix' ignoring case, but got [$actual]")(pos)
}
}
- def assertExistsIgnoreCase(expected: String)(actual: Iterable[String])(
- implicit pos: source.Position): Unit = {
+ def assertExistsIgnoreCase(expected: String)(actual: Iterable[String])(implicit
+ pos: Position): Unit = {
if (!actual.exists(_.equalsIgnoreCase(expected))) {
fail(s"Expected containing '$expected' ignoring case, but got [$actual]")(pos)
}
@@ -73,7 +75,7 @@ object AssertionUtils {
regenScript: String,
splitFirstExpectedLine: Boolean = false)(implicit
prettifier: Prettifier,
- pos: source.Position): Unit = {
+ pos: Position): Unit = {
val fileSource = Source.fromFile(path.toUri, StandardCharsets.UTF_8.name())
try {
def expectedLinesIter = if (splitFirstExpectedLine) {
@@ -104,13 +106,44 @@ object AssertionUtils {
}
}
+ /**
+ * Assert the iterable contains all the expected elements
+ */
+ def assertContains(actual: TraversableOnce[AnyRef], expected: AnyRef*)(implicit
+ prettifier: Prettifier,
+ pos: Position): Unit =
+ withClue(s", expected containing [${expected.mkString(", ")}]") {
+ val actualSeq = actual.toSeq
+ expected.foreach { elem => assert(actualSeq.contains(elem))(prettifier, pos) }
+ }
+
+ /**
+ * Asserts the string matches the regex
+ */
+ def assertMatches(actual: String, regex: Regex)(implicit
+ prettifier: Prettifier,
+ pos: Position): Unit =
+ withClue(s"'$actual' expected matching the regex '$regex'") {
+ assert(regex.findFirstMatchIn(actual).isDefined)(prettifier, pos)
+ }
+
+ /**
+ * Asserts the string does not match the regex
+ */
+ def assertNotMatches(actual: String, regex: Regex)(implicit
+ prettifier: Prettifier,
+ pos: Position): Unit =
+ withClue(s"'$actual' expected not matching the regex '$regex'") {
+ assert(regex.findFirstMatchIn(actual).isEmpty)(prettifier, pos)
+ }
+
/**
* Asserts that the given function throws an exception of the given type
* and with the exception message equals to expected string
*/
def interceptEquals[T <: Exception](f: => Any)(expected: String)(implicit
classTag: ClassTag[T],
- pos: source.Position): Unit = {
+ pos: Position): Unit = {
assert(expected != null)
val exception = intercept[T](f)(classTag, pos)
assertResult(expected)(exception.getMessage)
@@ -122,7 +155,7 @@ object AssertionUtils {
*/
def interceptContains[T <: Exception](f: => Any)(contained: String)(implicit
classTag: ClassTag[T],
- pos: source.Position): Unit = {
+ pos: Position): Unit = {
assert(contained != null)
val exception = intercept[T](f)(classTag, pos)
assert(exception.getMessage.contains(contained))
diff --git a/pom.xml b/pom.xml
index e9cee4ce635..df1e0c3b706 100644
--- a/pom.xml
+++ b/pom.xml
@@ -138,7 +138,7 @@
0.9.3
0.62.2
1.17.1
-        <flink.archive.name>flink-${flink.version}-bin-scala_${scala.binary.version}.tgz</flink.archive.name>
+        <flink.archive.name>flink-${flink.version}-bin-scala_2.12.tgz</flink.archive.name>
${apache.archive.dist}/flink/flink-${flink.version}
false
3.0.2
@@ -198,7 +198,8 @@
-->
3.4.1
3.4
-        <spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
+        <spark.archive.scala.suffix></spark.archive.scala.suffix>
+        <spark.archive.name>spark-${spark.version}-bin-hadoop3${spark.archive.scala.suffix}.tgz</spark.archive.name>
${apache.archive.dist}/spark/spark-${spark.version}
false
3.42.0.0
@@ -2135,6 +2136,7 @@
2.13
2.13.8
+        <spark.archive.scala.suffix>-scala${scala.binary.version}</spark.archive.scala.suffix>
@@ -2207,7 +2209,7 @@
3.2.4
3.2
2.0.2
-        <spark.archive.name>spark-${spark.version}-bin-hadoop3.2.tgz</spark.archive.name>
+        <spark.archive.name>spark-${spark.version}-bin-hadoop3.2${spark.archive.scala.suffix}.tgz</spark.archive.name>
org.scalatest.tags.Slow