diff --git a/extensions/spark/kyuubi-extension-spark-3-1/pom.xml b/extensions/spark/kyuubi-extension-spark-3-1/pom.xml
index a7fcbabe5b4..cbd1267cd94 100644
--- a/extensions/spark/kyuubi-extension-spark-3-1/pom.xml
+++ b/extensions/spark/kyuubi-extension-spark-3-1/pom.xml
@@ -137,6 +137,14 @@
            <artifactId>log4j-slf4j-impl</artifactId>
            <scope>test</scope>
        </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
diff --git a/extensions/spark/kyuubi-extension-spark-3-2/pom.xml b/extensions/spark/kyuubi-extension-spark-3-2/pom.xml
index b1ddcecf84e..05a9ecd3f85 100644
--- a/extensions/spark/kyuubi-extension-spark-3-2/pom.xml
+++ b/extensions/spark/kyuubi-extension-spark-3-2/pom.xml
@@ -137,6 +137,14 @@
            <artifactId>log4j-slf4j-impl</artifactId>
            <scope>test</scope>
        </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
diff --git a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
index 2d3eec95722..2a89b185d07 100644
--- a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.types._
import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, OptimizeZorderStatement, Zorder, ZorderBytesUtils}
+import org.apache.kyuubi.util.AssertionUtils._
trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
override def sparkConf(): SparkConf = {
@@ -69,10 +70,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
"(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
"(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
- val e = intercept[KyuubiSQLExtensionException] {
+ interceptEquals[KyuubiSQLExtensionException] {
sql("OPTIMIZE up WHERE c1 > 1 ZORDER BY c1, c2")
- }
- assert(e.getMessage == "Filters are only supported for partitioned table")
+ }("Filters are only supported for partitioned table")
sql("OPTIMIZE up ZORDER BY c1, c2")
val res = sql("SELECT c1, c2 FROM up").collect()
@@ -201,9 +201,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
"(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
"(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
- val e = intercept[KyuubiSQLExtensionException](
- sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2"))
- assert(e.getMessage == "Only partition column filters are allowed")
+ interceptEquals[KyuubiSQLExtensionException] {
+ sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2")
+ }("Only partition column filters are allowed")
sql(s"OPTIMIZE p WHERE id = 1 ZORDER BY c1, c2")
@@ -232,10 +232,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
// TODO remove this if we support datasource table
withTable("t") {
sql("CREATE TABLE t (c1 int, c2 int) USING PARQUET")
- val msg = intercept[KyuubiSQLExtensionException] {
+ interceptContains[KyuubiSQLExtensionException] {
sql("OPTIMIZE t ZORDER BY c1, c2")
- }.getMessage
- assert(msg.contains("only support hive table"))
+ }("only support hive table")
}
}
@@ -735,15 +734,13 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
test("OPTIMIZE partition predicates constraint") {
withTable("p") {
sql("CREATE TABLE p (c1 INT, c2 INT) PARTITIONED BY (event_date DATE)")
- val e1 = intercept[KyuubiSQLExtensionException] {
+ interceptContains[KyuubiSQLExtensionException] {
sql("OPTIMIZE p WHERE event_date = current_date as c ZORDER BY c1, c2")
- }
- assert(e1.getMessage.contains("unsupported partition predicates"))
+ }("unsupported partition predicates")
- val e2 = intercept[KyuubiSQLExtensionException] {
+ interceptEquals[KyuubiSQLExtensionException] {
sql("OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2")
- }
- assert(e2.getMessage == "Only partition column filters are allowed")
+ }("Only partition column filters are allowed")
}
}
diff --git a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
index 2d3eec95722..a03eb50da30 100644
--- a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.types._
import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, OptimizeZorderStatement, Zorder, ZorderBytesUtils}
+import org.apache.kyuubi.util.AssertionUtils._
trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
override def sparkConf(): SparkConf = {
@@ -69,10 +70,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
"(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
"(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
- val e = intercept[KyuubiSQLExtensionException] {
+ interceptEquals[KyuubiSQLExtensionException] {
sql("OPTIMIZE up WHERE c1 > 1 ZORDER BY c1, c2")
- }
- assert(e.getMessage == "Filters are only supported for partitioned table")
+ }("Filters are only supported for partitioned table")
sql("OPTIMIZE up ZORDER BY c1, c2")
val res = sql("SELECT c1, c2 FROM up").collect()
@@ -201,9 +201,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
"(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
"(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
- val e = intercept[KyuubiSQLExtensionException](
- sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2"))
- assert(e.getMessage == "Only partition column filters are allowed")
+ interceptEquals[KyuubiSQLExtensionException] {
+ sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2")
+ }("Only partition column filters are allowed")
sql(s"OPTIMIZE p WHERE id = 1 ZORDER BY c1, c2")
@@ -232,10 +232,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
// TODO remove this if we support datasource table
withTable("t") {
sql("CREATE TABLE t (c1 int, c2 int) USING PARQUET")
- val msg = intercept[KyuubiSQLExtensionException] {
+ interceptContains[KyuubiSQLExtensionException] {
sql("OPTIMIZE t ZORDER BY c1, c2")
- }.getMessage
- assert(msg.contains("only support hive table"))
+ }("only support hive table")
}
}
@@ -735,15 +734,13 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
test("OPTIMIZE partition predicates constraint") {
withTable("p") {
sql("CREATE TABLE p (c1 INT, c2 INT) PARTITIONED BY (event_date DATE)")
- val e1 = intercept[KyuubiSQLExtensionException] {
+ interceptContains[KyuubiSQLExtensionException] {
sql("OPTIMIZE p WHERE event_date = current_date as c ZORDER BY c1, c2")
- }
- assert(e1.getMessage.contains("unsupported partition predicates"))
+ }("unsupported partition predicates")
- val e2 = intercept[KyuubiSQLExtensionException] {
+ interceptEquals[KyuubiSQLExtensionException] {
sql("OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2")
- }
- assert(e2.getMessage == "Only partition column filters are allowed")
+ }("Only partition column filters are allowed")
}
}
diff --git a/extensions/spark/kyuubi-extension-spark-common/pom.xml b/extensions/spark/kyuubi-extension-spark-common/pom.xml
index 259931a2e2f..1cd41a25d6b 100644
--- a/extensions/spark/kyuubi-extension-spark-common/pom.xml
+++ b/extensions/spark/kyuubi-extension-spark-common/pom.xml
@@ -122,6 +122,14 @@
            <artifactId>log4j-slf4j-impl</artifactId>
            <scope>test</scope>
        </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
diff --git a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala b/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
index e0f86f85d84..15766e87270 100644
--- a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
+++ b/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
@@ -32,6 +32,7 @@ import org.apache.spark.sql.types._
import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, OptimizeZorderStatement, Zorder, ZorderBytesUtils}
+import org.apache.kyuubi.util.AssertionUtils._
trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
override def sparkConf(): SparkConf = {
@@ -70,10 +71,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
"(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
"(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
- val e = intercept[KyuubiSQLExtensionException] {
+ interceptEquals[KyuubiSQLExtensionException] {
sql("OPTIMIZE up WHERE c1 > 1 ZORDER BY c1, c2")
- }
- assert(e.getMessage == "Filters are only supported for partitioned table")
+ }("Filters are only supported for partitioned table")
sql("OPTIMIZE up ZORDER BY c1, c2")
val res = sql("SELECT c1, c2 FROM up").collect()
@@ -202,9 +202,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
"(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
"(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
- val e = intercept[KyuubiSQLExtensionException](
- sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2"))
- assert(e.getMessage == "Only partition column filters are allowed")
+ interceptEquals[KyuubiSQLExtensionException] {
+ sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2")
+ }("Only partition column filters are allowed")
sql(s"OPTIMIZE p WHERE id = 1 ZORDER BY c1, c2")
@@ -233,10 +233,8 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
// TODO remove this if we support datasource table
withTable("t") {
sql("CREATE TABLE t (c1 int, c2 int) USING PARQUET")
- val msg = intercept[KyuubiSQLExtensionException] {
- sql("OPTIMIZE t ZORDER BY c1, c2")
- }.getMessage
- assert(msg.contains("only support hive table"))
+ interceptContains[KyuubiSQLExtensionException] { sql("OPTIMIZE t ZORDER BY c1, c2") }(
+ "only support hive table")
}
}
@@ -736,15 +734,13 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
test("OPTIMIZE partition predicates constraint") {
withTable("p") {
sql("CREATE TABLE p (c1 INT, c2 INT) PARTITIONED BY (event_date DATE)")
- val e1 = intercept[KyuubiSQLExtensionException] {
+ interceptContains[KyuubiSQLExtensionException] {
sql("OPTIMIZE p WHERE event_date = current_date as c ZORDER BY c1, c2")
- }
- assert(e1.getMessage.contains("unsupported partition predicates"))
+ }("unsupported partition predicates")
- val e2 = intercept[KyuubiSQLExtensionException] {
+ interceptEquals[KyuubiSQLExtensionException] {
sql("OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2")
- }
- assert(e2.getMessage == "Only partition column filters are allowed")
+ }("Only partition column filters are allowed")
}
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AuthzSessionSigningSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AuthzSessionSigningSuite.scala
index 080ec013f5a..b3a87ac173b 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AuthzSessionSigningSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AuthzSessionSigningSuite.scala
@@ -28,6 +28,7 @@ import org.scalatest.funsuite.AnyFunSuite
import org.apache.kyuubi.config.KyuubiReservedKeys.{KYUUBI_SESSION_SIGN_PUBLICKEY, KYUUBI_SESSION_USER_KEY, KYUUBI_SESSION_USER_SIGN}
import org.apache.kyuubi.plugin.spark.authz.{AccessControlException, SparkSessionProvider}
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.SignUtils
class AuthzSessionSigningSuite extends AnyFunSuite
@@ -62,13 +63,13 @@ class AuthzSessionSigningSuite extends AnyFunSuite
// fake session user name
val fakeSessionUser = "faker"
sc.setLocalProperty(KYUUBI_SESSION_USER_KEY, fakeSessionUser)
- val e1 = intercept[AccessControlException](AuthZUtils.getAuthzUgi(sc))
- assertResult(s"Invalid user identifier [$fakeSessionUser]")(e1.getMessage)
+ interceptEquals[AccessControlException](AuthZUtils.getAuthzUgi(sc))(
+ s"Invalid user identifier [$fakeSessionUser]")
sc.setLocalProperty(KYUUBI_SESSION_USER_KEY, kyuubiSessionUser)
// invalid session user sign
sc.setLocalProperty(KYUUBI_SESSION_USER_SIGN, "invalid_sign")
- val e2 = intercept[AccessControlException](AuthZUtils.getAuthzUgi(sc))
- assertResult(s"Invalid user identifier [$kyuubiSessionUser]")(e2.getMessage)
+ interceptEquals[AccessControlException](AuthZUtils.getAuthzUgi(sc))(
+ s"Invalid user identifier [$kyuubiSessionUser]")
}
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
index 193446bb24f..b181d501092 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
@@ -25,7 +25,7 @@ import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
import org.apache.kyuubi.tags.HudiTest
-import org.apache.kyuubi.util.AssertionUtils.interceptContains
+import org.apache.kyuubi.util.AssertionUtils._
/**
* Tests for RangerSparkExtensionSuite on Hudi SQL.
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
index 28e13aff3c0..9f13bb8032f 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
@@ -103,12 +103,9 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
""".stripMargin
// MergeIntoTable: Using a MERGE INTO Statement
- val e1 = intercept[AccessControlException](
- doAs(
- someone,
- sql(mergeIntoSql)))
- assert(e1.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1/id]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(mergeIntoSql))
+ }(s"does not have [select] privilege on [$namespace1/$table1/id]")
withSingleCallEnabled {
interceptContains[AccessControlException](doAs(someone, sql(mergeIntoSql)))(
@@ -181,8 +178,8 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
doAs(
createOnlyUser, {
- val e = intercept[AccessControlException](sql(select).collect())
- assert(e.getMessage === errorMessage("select", s"$namespace1/$table/key"))
+ interceptEquals[AccessControlException](sql(select).collect())(
+ errorMessage("select", s"$namespace1/$table/key"))
})
}
}
@@ -240,10 +237,9 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
}
test("[KYUUBI #4255] DESCRIBE TABLE") {
- val e1 = intercept[AccessControlException](
- doAs(someone, sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain()))
- assert(e1.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain())
+ }(s"does not have [select] privilege on [$namespace1/$table1]")
}
test("CALL RewriteDataFilesProcedure") {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index 8e1fe058739..9840447cddb 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -35,6 +35,7 @@ import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
import org.apache.kyuubi.plugin.spark.authz.ranger.RuleAuthorization.KYUUBI_AUTHZ_TAG
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
abstract class RangerSparkExtensionSuite extends AnyFunSuite
with SparkSessionProvider with BeforeAndAfterAll {
@@ -169,15 +170,12 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
val alter = s"ALTER DATABASE $testDb SET DBPROPERTIES (abc = '123')"
val drop = s"DROP DATABASE IF EXISTS $testDb"
- val e = intercept[AccessControlException](sql(create))
- assert(e.getMessage === errorMessage("create", "mydb"))
+ interceptEquals[AccessControlException](sql(create))(errorMessage("create", "mydb"))
withCleanTmpResources(Seq((testDb, "database"))) {
doAs(admin, assert(Try { sql(create) }.isSuccess))
doAs(admin, assert(Try { sql(alter) }.isSuccess))
- val e1 = intercept[AccessControlException](sql(alter))
- assert(e1.getMessage === errorMessage("alter", "mydb"))
- val e2 = intercept[AccessControlException](sql(drop))
- assert(e2.getMessage === errorMessage("drop", "mydb"))
+ interceptEquals[AccessControlException](sql(alter))(errorMessage("alter", "mydb"))
+ interceptEquals[AccessControlException](sql(drop))(errorMessage("drop", "mydb"))
doAs(kent, Try(sql("SHOW DATABASES")).isSuccess)
}
}
@@ -191,15 +189,13 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
val alter0 = s"ALTER TABLE $db.$table SET TBLPROPERTIES(key='ak')"
val drop0 = s"DROP TABLE IF EXISTS $db.$table"
val select = s"SELECT * FROM $db.$table"
- val e = intercept[AccessControlException](sql(create0))
- assert(e.getMessage === errorMessage("create"))
+ interceptEquals[AccessControlException](sql(create0))(errorMessage("create"))
withCleanTmpResources(Seq((s"$db.$table", "table"))) {
doAs(bob, assert(Try { sql(create0) }.isSuccess))
doAs(bob, assert(Try { sql(alter0) }.isSuccess))
- val e1 = intercept[AccessControlException](sql(drop0))
- assert(e1.getMessage === errorMessage("drop"))
+ interceptEquals[AccessControlException](sql(drop0))(errorMessage("drop"))
doAs(bob, assert(Try { sql(alter0) }.isSuccess))
doAs(bob, assert(Try { sql(select).collect() }.isSuccess))
doAs(kent, assert(Try { sql(s"SELECT key FROM $db.$table").collect() }.isSuccess))
@@ -215,8 +211,8 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
doAs(
kent, {
withClue(q) {
- val e = intercept[AccessControlException](sql(q).collect())
- assert(e.getMessage === errorMessage("select", "default/src/value", kent))
+ interceptEquals[AccessControlException](sql(q).collect())(
+ errorMessage("select", "default/src/value", kent))
}
})
}
@@ -228,10 +224,8 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
val func = "func"
val create0 = s"CREATE FUNCTION IF NOT EXISTS $db.$func AS 'abc.mnl.xyz'"
doAs(
- kent, {
- val e = intercept[AccessControlException](sql(create0))
- assert(e.getMessage === errorMessage("create", "default/func"))
- })
+ kent,
+ interceptEquals[AccessControlException](sql(create0))(errorMessage("create", "default/func")))
doAs(admin, assert(Try(sql(create0)).isSuccess))
}
@@ -463,8 +457,8 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
withCleanTmpResources(Seq((s"$fun", "function"))) {
doAs(admin, sql(s"CREATE FUNCTION $fun AS 'Function1'"))
doAs(admin, sql(s"DESC FUNCTION $fun").collect().length == 1)
- val e = intercept[AccessControlException](doAs(denyUser, sql(s"DESC FUNCTION $fun")))
- assert(e.getMessage === errorMessage("_any", "default/function1", denyUser))
+ interceptEquals[AccessControlException](doAs(denyUser, sql(s"DESC FUNCTION $fun")))(
+ errorMessage("_any", "default/function1", denyUser))
}
}
}
@@ -536,17 +530,17 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
doAs(admin, sql(s"CREATE VIEW ${adminPermView} AS SELECT * FROM $table"))
- val e1 = intercept[AccessControlException](
- doAs(someone, sql(s"CREATE VIEW $permView AS SELECT 1 as a")))
- assert(e1.getMessage.contains(s"does not have [create] privilege on [default/$permView]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"CREATE VIEW $permView AS SELECT 1 as a"))
+ }(s"does not have [create] privilege on [default/$permView]")
- val e2 = intercept[AccessControlException](
- doAs(someone, sql(s"CREATE VIEW $permView AS SELECT * FROM $table")))
- if (isSparkV32OrGreater) {
- assert(e2.getMessage.contains(s"does not have [select] privilege on [default/$table/id]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"CREATE VIEW $permView AS SELECT * FROM $table"))
+ }(if (isSparkV32OrGreater) {
+ s"does not have [select] privilege on [default/$table/id]"
} else {
- assert(e2.getMessage.contains(s"does not have [select] privilege on [$table]"))
- }
+ s"does not have [select] privilege on [$table]"
+ })
}
}
@@ -562,12 +556,9 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
doAs(admin, sql(s"CREATE VIEW $db1.$permView AS SELECT * FROM $db1.$table"))
// KYUUBI #3326: with no privileges to the permanent view or the source table
- val e1 = intercept[AccessControlException](
- doAs(
- someone, {
- sql(s"select * from $db1.$permView").collect()
- }))
- assert(e1.getMessage.contains(s"does not have [select] privilege on [$db1/$permView/id]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"select * from $db1.$permView").collect())
+ }(s"does not have [select] privilege on [$db1/$permView/id]")
}
}
@@ -625,17 +616,17 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
s" FROM $db1.$srcTable1 as tb1" +
s" JOIN $db1.$srcTable2 as tb2" +
s" on tb1.id = tb2.id"
- val e1 = intercept[AccessControlException](doAs(someone, sql(insertSql1)))
- assert(e1.getMessage.contains(s"does not have [select] privilege on [$db1/$srcTable1/id]"))
+ interceptContains[AccessControlException](doAs(someone, sql(insertSql1)))(
+ s"does not have [select] privilege on [$db1/$srcTable1/id]")
withSingleCallEnabled {
- val e2 = intercept[AccessControlException](doAs(someone, sql(insertSql1)))
- assert(e2.getMessage.contains(s"does not have" +
- s" [select] privilege on" +
- s" [$db1/$srcTable1/id,$db1/$srcTable1/name,$db1/$srcTable1/city," +
- s"$db1/$srcTable2/age,$db1/$srcTable2/id]," +
- s" [update] privilege on [$db1/$sinkTable1/id,$db1/$sinkTable1/age," +
- s"$db1/$sinkTable1/name,$db1/$sinkTable1/city]"))
+ interceptContains[AccessControlException](doAs(someone, sql(insertSql1)))(
+ s"does not have" +
+ s" [select] privilege on" +
+ s" [$db1/$srcTable1/id,$db1/$srcTable1/name,$db1/$srcTable1/city," +
+ s"$db1/$srcTable2/age,$db1/$srcTable2/id]," +
+ s" [update] privilege on [$db1/$sinkTable1/id,$db1/$sinkTable1/age," +
+ s"$db1/$sinkTable1/name,$db1/$sinkTable1/city]")
}
}
}
@@ -662,10 +653,9 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
sql(s"CREATE TABLE IF NOT EXISTS $db1.$srcTable1" +
s" (id int, name string, city string)"))
- val e1 = intercept[AccessControlException](
- doAs(someone, sql(s"CACHE TABLE $cacheTable2 select * from $db1.$srcTable1")))
- assert(
- e1.getMessage.contains(s"does not have [select] privilege on [$db1/$srcTable1/id]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"CACHE TABLE $cacheTable2 select * from $db1.$srcTable1"))
+ }(s"does not have [select] privilege on [$db1/$srcTable1/id]")
doAs(admin, sql(s"CACHE TABLE $cacheTable3 SELECT 1 AS a, 2 AS b "))
doAs(someone, sql(s"CACHE TABLE $cacheTable4 select 1 as a, 2 as b "))
@@ -694,8 +684,8 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
doAs(
createOnlyUser, {
- val e = intercept[AccessControlException](sql(select).collect())
- assert(e.getMessage === errorMessage("select", s"$db/$table/key"))
+ interceptEquals[AccessControlException](sql(select).collect())(
+ errorMessage("select", s"$db/$table/key"))
})
}
}
@@ -720,14 +710,14 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
withCleanTmpResources(Seq((s"$db1.$table", "table"))) {
doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name string)"))
- val e = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(
s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir' ROW FORMAT DELIMITED FIELDS
| TERMINATED BY ','
- | SELECT * FROM $db1.$table;""".stripMargin)))
- assert(e.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id]"))
+ | SELECT * FROM $db1.$table;""".stripMargin))
+ }(s"does not have [select] privilege on [$db1/$table/id]")
}
}
@@ -737,14 +727,14 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
withCleanTmpResources(Seq((s"$db1.$table", "table"))) {
doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name string)"))
- val e = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(
s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir'
| USING parquet
- | SELECT * FROM $db1.$table;""".stripMargin)))
- assert(e.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id]"))
+ | SELECT * FROM $db1.$table;""".stripMargin))
+ }(s"does not have [select] privilege on [$db1/$table/id]")
}
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
index 046052d558d..dee342cd500 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
@@ -26,6 +26,7 @@ import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
+import org.apache.kyuubi.util.AssertionUtils._
/**
* Tests for RangerSparkExtensionSuite
@@ -77,96 +78,85 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSu
test("[KYUUBI #3424] CREATE DATABASE") {
// create database
- val e1 = intercept[AccessControlException](
- doAs(someone, sql(s"CREATE DATABASE IF NOT EXISTS $catalogV2.$namespace2").explain()))
- assert(e1.getMessage.contains(s"does not have [create] privilege" +
- s" on [$namespace2]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"CREATE DATABASE IF NOT EXISTS $catalogV2.$namespace2").explain())
+ }(s"does not have [create] privilege on [$namespace2]")
}
test("[KYUUBI #3424] DROP DATABASE") {
// create database
- val e1 = intercept[AccessControlException](
- doAs(someone, sql(s"DROP DATABASE IF EXISTS $catalogV2.$namespace2").explain()))
- assert(e1.getMessage.contains(s"does not have [drop] privilege" +
- s" on [$namespace2]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"DROP DATABASE IF EXISTS $catalogV2.$namespace2").explain())
+ }(s"does not have [drop] privilege on [$namespace2]")
}
test("[KYUUBI #3424] SELECT TABLE") {
// select
- val e1 = intercept[AccessControlException](
- doAs(someone, sql(s"select city, id from $catalogV2.$namespace1.$table1").explain()))
- assert(e1.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1/city]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"select city, id from $catalogV2.$namespace1.$table1").explain())
+ }(s"does not have [select] privilege on [$namespace1/$table1/city]")
}
test("[KYUUBI #4255] DESCRIBE TABLE") {
- val e1 = intercept[AccessControlException](
- doAs(someone, sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain()))
- assert(e1.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain())
+ }(s"does not have [select] privilege on [$namespace1/$table1]")
}
test("[KYUUBI #3424] CREATE TABLE") {
// CreateTable
- val e2 = intercept[AccessControlException](
- doAs(someone, sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table2")))
- assert(e2.getMessage.contains(s"does not have [create] privilege" +
- s" on [$namespace1/$table2]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table2"))
+ }(s"does not have [create] privilege on [$namespace1/$table2]")
// CreateTableAsSelect
- val e21 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table2" +
- s" AS select * from $catalogV2.$namespace1.$table1")))
- assert(e21.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1/id]"))
+ s" AS select * from $catalogV2.$namespace1.$table1"))
+ }(s"does not have [select] privilege on [$namespace1/$table1/id]")
}
test("[KYUUBI #3424] DROP TABLE") {
// DropTable
- val e3 = intercept[AccessControlException](
- doAs(someone, sql(s"DROP TABLE $catalogV2.$namespace1.$table1")))
- assert(e3.getMessage.contains(s"does not have [drop] privilege" +
- s" on [$namespace1/$table1]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"DROP TABLE $catalogV2.$namespace1.$table1"))
+ }(s"does not have [drop] privilege on [$namespace1/$table1]")
}
test("[KYUUBI #3424] INSERT TABLE") {
// AppendData: Insert Using a VALUES Clause
- val e4 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"INSERT INTO $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
- s" VALUES (1, 'bowenliang123', 'Guangzhou')")))
- assert(e4.getMessage.contains(s"does not have [update] privilege" +
- s" on [$namespace1/$outputTable1]"))
+ s" VALUES (1, 'bowenliang123', 'Guangzhou')"))
+ }(s"does not have [update] privilege on [$namespace1/$outputTable1]")
// AppendData: Insert Using a TABLE Statement
- val e42 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"INSERT INTO $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
- s" TABLE $catalogV2.$namespace1.$table1")))
- assert(e42.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1/id]"))
+ s" TABLE $catalogV2.$namespace1.$table1"))
+ }(s"does not have [select] privilege on [$namespace1/$table1/id]")
// AppendData: Insert Using a SELECT Statement
- val e43 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"INSERT INTO $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
- s" SELECT * from $catalogV2.$namespace1.$table1")))
- assert(e43.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1/id]"))
+ s" SELECT * from $catalogV2.$namespace1.$table1"))
+ }(s"does not have [select] privilege on [$namespace1/$table1/id]")
// OverwriteByExpression: Insert Overwrite
- val e44 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"INSERT OVERWRITE $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
- s" VALUES (1, 'bowenliang123', 'Guangzhou')")))
- assert(e44.getMessage.contains(s"does not have [update] privilege" +
- s" on [$namespace1/$outputTable1]"))
+ s" VALUES (1, 'bowenliang123', 'Guangzhou')"))
+ }(s"does not have [update] privilege on [$namespace1/$outputTable1]")
}
test("[KYUUBI #3424] MERGE INTO") {
@@ -180,141 +170,119 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSu
""".stripMargin
// MergeIntoTable: Using a MERGE INTO Statement
- val e1 = intercept[AccessControlException](
- doAs(
- someone,
- sql(mergeIntoSql)))
- assert(e1.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1/id]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(mergeIntoSql))
+ }(s"does not have [select] privilege on [$namespace1/$table1/id]")
withSingleCallEnabled {
- val e2 = intercept[AccessControlException](
- doAs(
- someone,
- sql(mergeIntoSql)))
- assert(e2.getMessage.contains(s"does not have" +
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(mergeIntoSql))
+ }(s"does not have" +
s" [select] privilege" +
s" on [$namespace1/$table1/id,$namespace1/table1/name,$namespace1/$table1/city]," +
- s" [update] privilege on [$namespace1/$outputTable1]"))
+ s" [update] privilege on [$namespace1/$outputTable1]")
}
}
test("[KYUUBI #3424] UPDATE TABLE") {
// UpdateTable
- val e5 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"UPDATE $catalogV2.$namespace1.$table1 SET city='Hangzhou' " +
- " WHERE id=1")))
- assert(e5.getMessage.contains(s"does not have [update] privilege" +
- s" on [$namespace1/$table1]"))
+ " WHERE id=1"))
+ }(s"does not have [update] privilege on [$namespace1/$table1]")
}
test("[KYUUBI #3424] DELETE FROM TABLE") {
// DeleteFromTable
- val e6 = intercept[AccessControlException](
- doAs(someone, sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE id=1")))
- assert(e6.getMessage.contains(s"does not have [update] privilege" +
- s" on [$namespace1/$table1]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE id=1"))
+ }(s"does not have [update] privilege on [$namespace1/$table1]")
}
test("[KYUUBI #3424] CACHE TABLE") {
// CacheTable
- val e7 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"CACHE TABLE $cacheTable1" +
- s" AS select * from $catalogV2.$namespace1.$table1")))
- if (isSparkV32OrGreater) {
- assert(e7.getMessage.contains(s"does not have [select] privilege" +
- s" on [$namespace1/$table1/id]"))
+ s" AS select * from $catalogV2.$namespace1.$table1"))
+ }(if (isSparkV32OrGreater) {
+ s"does not have [select] privilege on [$namespace1/$table1/id]"
} else {
- assert(e7.getMessage.contains(s"does not have [select] privilege" +
- s" on [$catalogV2.$namespace1/$table1]"))
- }
+ s"does not have [select] privilege on [$catalogV2.$namespace1/$table1]"
+ })
}
test("[KYUUBI #3424] TRUNCATE TABLE") {
assume(isSparkV32OrGreater)
- val e1 = intercept[AccessControlException](
- doAs(
- someone,
- sql(s"TRUNCATE TABLE $catalogV2.$namespace1.$table1")))
- assert(e1.getMessage.contains(s"does not have [update] privilege" +
- s" on [$namespace1/$table1]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"TRUNCATE TABLE $catalogV2.$namespace1.$table1"))
+ }(s"does not have [update] privilege on [$namespace1/$table1]")
}
test("[KYUUBI #3424] MSCK REPAIR TABLE") {
assume(isSparkV32OrGreater)
- val e1 = intercept[AccessControlException](
- doAs(
- someone,
- sql(s"MSCK REPAIR TABLE $catalogV2.$namespace1.$table1")))
- assert(e1.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1/$table1]"))
+ interceptContains[AccessControlException] {
+ doAs(someone, sql(s"MSCK REPAIR TABLE $catalogV2.$namespace1.$table1"))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
}
test("[KYUUBI #3424] ALTER TABLE") {
// AddColumns
- val e61 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
- sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 ADD COLUMNS (age int) ").explain()))
- assert(e61.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1/$table1]"))
+ sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 ADD COLUMNS (age int) ").explain())
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
// DropColumns
- val e62 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
- sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 DROP COLUMNS city ").explain()))
- assert(e62.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1/$table1]"))
+ sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 DROP COLUMNS city ").explain())
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
// RenameColumn
- val e63 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
- sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 RENAME COLUMN city TO city2 ").explain()))
- assert(e63.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1/$table1]"))
+ sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 RENAME COLUMN city TO city2 ").explain())
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
// AlterColumn
- val e64 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 " +
- s"ALTER COLUMN city COMMENT 'city' ")))
- assert(e64.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1/$table1]"))
+ s"ALTER COLUMN city COMMENT 'city' "))
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
}
test("[KYUUBI #3424] COMMENT ON") {
// CommentOnNamespace
- val e1 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
- sql(s"COMMENT ON DATABASE $catalogV2.$namespace1 IS 'xYz' ").explain()))
- assert(e1.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1]"))
+ sql(s"COMMENT ON DATABASE $catalogV2.$namespace1 IS 'xYz' ").explain())
+ }(s"does not have [alter] privilege on [$namespace1]")
// CommentOnNamespace
- val e2 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
- sql(s"COMMENT ON NAMESPACE $catalogV2.$namespace1 IS 'xYz' ").explain()))
- assert(e2.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1]"))
+ sql(s"COMMENT ON NAMESPACE $catalogV2.$namespace1 IS 'xYz' ").explain())
+ }(s"does not have [alter] privilege on [$namespace1]")
// CommentOnTable
- val e3 = intercept[AccessControlException](
+ interceptContains[AccessControlException] {
doAs(
someone,
- sql(s"COMMENT ON TABLE $catalogV2.$namespace1.$table1 IS 'xYz' ").explain()))
- assert(e3.getMessage.contains(s"does not have [alter] privilege" +
- s" on [$namespace1/$table1]"))
+ sql(s"COMMENT ON TABLE $catalogV2.$namespace1.$table1 IS 'xYz' ").explain())
+ }(s"does not have [alter] privilege on [$namespace1/$table1]")
}
}
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala
index f43dafd1163..f38b0489a2f 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala
@@ -37,6 +37,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.kyuubi.spark.connector.hive.HiveTableCatalog.IdentifierHelper
import org.apache.kyuubi.spark.connector.hive.read.HiveScan
+import org.apache.kyuubi.util.AssertionUtils._
class HiveCatalogSuite extends KyuubiHiveTest {
@@ -297,21 +298,17 @@ class HiveCatalogSuite extends KyuubiHiveTest {
catalog.dropNamespace(testNs, true)
assert(catalog.namespaceExists(testNs) === false)
- val exc = intercept[NoSuchNamespaceException] {
+ interceptContains[NoSuchNamespaceException] {
assert(catalog.listNamespaces(testNs) === Array())
- }
-
- assert(exc.getMessage.contains(testNs.quoted))
+ }(testNs.quoted)
assert(catalog.namespaceExists(testNs) === false)
}
test("loadNamespaceMetadata: fail missing namespace") {
catalog.dropNamespace(testNs, true)
- val exc = intercept[NoSuchNamespaceException] {
+ interceptContains[NoSuchNamespaceException] {
catalog.loadNamespaceMetadata(testNs)
- }
-
- assert(exc.getMessage.contains(testNs.quoted))
+ }(testNs.quoted)
}
test("loadNamespaceMetadata: non-empty metadata") {
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala
index 0dd1efdec97..3a0c1fe22ff 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveQuerySuite.scala
@@ -20,6 +20,8 @@ package org.apache.kyuubi.spark.connector.hive
import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
+import org.apache.kyuubi.util.AssertionUtils._
+
class HiveQuerySuite extends KyuubiHiveTest {
def withTempNonPartitionedTable(
@@ -196,18 +198,18 @@ class HiveQuerySuite extends KyuubiHiveTest {
withSparkSession() { spark =>
val table = "hive.default.employee"
withTempPartitionedTable(spark, table) {
- val exception = intercept[KyuubiHiveConnectorException] {
+ interceptContains[KyuubiHiveConnectorException] {
spark.sql(
s"""
| INSERT OVERWRITE
| $table PARTITION(year = '', month = '08')
| VALUES("yi")
|""".stripMargin).collect()
- }
- // 1. not thrown `Dynamic partition cannot be the parent of a static partition`
- // 2. thrown `Partition spec is invalid`, should be consist with spark v1.
- assert(exception.message.contains("Partition spec is invalid. The spec (year='') " +
- "contains an empty partition column value"))
+ }(
+ // 1. not thrown `Dynamic partition cannot be the parent of a static partition`
+ // 2. thrown `Partition spec is invalid`, should be consistent with Spark v1.
+ "Partition spec is invalid. The spec (year='') " +
+ "contains an empty partition column value")
}
}
}
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala
index d6b90cc0419..c23d022b31e 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/CreateNamespaceSuite.scala
@@ -27,6 +27,7 @@ import org.apache.spark.sql.connector.catalog.SupportsNamespaces
import org.apache.spark.sql.internal.SQLConf
import org.apache.kyuubi.spark.connector.hive.command.DDLCommandTestUtils.{V1_COMMAND_VERSION, V2_COMMAND_VERSION}
+import org.apache.kyuubi.util.AssertionUtils._
trait CreateNamespaceSuiteBase extends DDLCommandTestUtils {
override protected def command: String = "CREATE NAMESPACE"
@@ -59,11 +60,9 @@ trait CreateNamespaceSuiteBase extends DDLCommandTestUtils {
val path = tmpDir.getCanonicalPath
assert(!path.startsWith("file:/"))
- val e = intercept[IllegalArgumentException] {
+ interceptContainsAny[IllegalArgumentException] {
sql(s"CREATE NAMESPACE $ns LOCATION ''")
- }
- assert(e.getMessage.contains("Can not create a Path from an empty string") ||
- e.getMessage.contains("The location name cannot be empty string"))
+ }("Can not create a Path from an empty string", "The location name cannot be empty string")
val uri = new Path(path).toUri
sql(s"CREATE NAMESPACE $ns LOCATION '$uri'")
@@ -81,11 +80,11 @@ trait CreateNamespaceSuiteBase extends DDLCommandTestUtils {
withNamespace(ns) {
sql(s"CREATE NAMESPACE $ns")
- val e = intercept[NamespaceAlreadyExistsException] {
+ interceptContainsAny[NamespaceAlreadyExistsException] {
sql(s"CREATE NAMESPACE $ns")
- }
- assert(e.getMessage.contains(s"Namespace '$namespace' already exists") ||
- e.getMessage.contains(s"Cannot create schema `fakens` because it already exists"))
+ }(
+ s"Namespace '$namespace' already exists",
+ s"Cannot create schema `fakens` because it already exists")
// The following will be no-op since the namespace already exists.
Try { sql(s"CREATE NAMESPACE IF NOT EXISTS $ns") }.isSuccess
@@ -97,10 +96,9 @@ trait CreateNamespaceSuiteBase extends DDLCommandTestUtils {
val ns = s"$catalogName.$namespace"
withSQLConf((SQLConf.LEGACY_PROPERTY_NON_RESERVED.key, "false")) {
NAMESPACE_RESERVED_PROPERTIES.filterNot(_ == PROP_COMMENT).foreach { key =>
- val exception = intercept[ParseException] {
+ interceptContains[ParseException] {
sql(s"CREATE NAMESPACE $ns WITH DBPROPERTIES('$key'='dummyVal')")
- }
- assert(exception.getMessage.contains(s"$key is a reserved namespace property"))
+ }(s"$key is a reserved namespace property")
}
}
withSQLConf((SQLConf.LEGACY_PROPERTY_NON_RESERVED.key, "true")) {
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
index eebfbe48812..35d98f2a0b9 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/command/DropNamespaceSuite.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.types.{StringType, StructType}
import org.apache.kyuubi.spark.connector.common.SparkUtils.SPARK_RUNTIME_VERSION
import org.apache.kyuubi.spark.connector.hive.command.DDLCommandTestUtils.{V1_COMMAND_VERSION, V2_COMMAND_VERSION}
-import org.apache.kyuubi.util.AssertionUtils.interceptContains
+import org.apache.kyuubi.util.AssertionUtils._
trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
override protected def command: String = "DROP NAMESPACE"
@@ -59,11 +59,9 @@ trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
test("namespace does not exist") {
// Namespace $catalog.unknown does not exist.
- val message = intercept[AnalysisException] {
+ interceptContainsAny[AnalysisException] {
sql(s"DROP NAMESPACE $catalogName.unknown")
- }.getMessage
- assert(message.contains(s"'unknown' not found") ||
- message.contains(s"The schema `unknown` cannot be found"))
+ }("'unknown' not found", "The schema `unknown` cannot be found")
}
test("drop non-empty namespace with a non-cascading mode") {
diff --git a/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala b/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala
index f5c6563e770..29d6c78b2da 100644
--- a/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala
@@ -24,6 +24,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.spark.connector.common.LocalSparkSession.withSparkSession
import org.apache.kyuubi.spark.connector.common.SparkUtils.SPARK_RUNTIME_VERSION
+import org.apache.kyuubi.util.AssertionUtils._
class TPCDSCatalogSuite extends KyuubiFunSuite {
@@ -167,11 +168,9 @@ class TPCDSCatalogSuite extends KyuubiFunSuite {
.set("spark.sql.cbo.enabled", "true")
.set("spark.sql.cbo.planStats.enabled", "true")
withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
- val exception = intercept[AnalysisException] {
+ interceptContainsAny[AnalysisException] {
spark.table("tpcds.sf1.nonexistent_table")
- }
- assert(exception.message.contains("Table or view not found")
- || exception.message.contains("TABLE_OR_VIEW_NOT_FOUND"))
+ }("Table or view not found", "TABLE_OR_VIEW_NOT_FOUND")
}
}
}
diff --git a/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala b/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala
index 14415141e63..0e90d9137f5 100644
--- a/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala
+++ b/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala
@@ -24,6 +24,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.spark.connector.common.LocalSparkSession.withSparkSession
import org.apache.kyuubi.spark.connector.common.SparkUtils.SPARK_RUNTIME_VERSION
+import org.apache.kyuubi.util.AssertionUtils._
class TPCHCatalogSuite extends KyuubiFunSuite {
@@ -155,11 +156,9 @@ class TPCHCatalogSuite extends KyuubiFunSuite {
.set("spark.sql.cbo.enabled", "true")
.set("spark.sql.cbo.planStats.enabled", "true")
withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
- val exception = intercept[AnalysisException] {
+ interceptContainsAny[AnalysisException] {
spark.table("tpch.sf1.nonexistent_table")
- }
- assert(exception.message.contains("Table or view not found")
- || exception.message.contains("TABLE_OR_VIEW_NOT_FOUND"))
+ }("Table or view not found", "TABLE_OR_VIEW_NOT_FOUND")
}
}
}
diff --git a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
index 8e7c35a95a4..006a8d79f33 100644
--- a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
+++ b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
@@ -38,6 +38,7 @@ import org.apache.kyuubi.jdbc.hive.{KyuubiSQLException, KyuubiStatement}
import org.apache.kyuubi.jdbc.hive.common.TimestampTZ
import org.apache.kyuubi.operation.HiveJDBCTestHelper
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+import org.apache.kyuubi.util.AssertionUtils._
abstract class FlinkOperationSuite extends HiveJDBCTestHelper with WithFlinkTestResources {
@@ -1252,7 +1253,7 @@ abstract class FlinkOperationSuite extends HiveJDBCTestHelper with WithFlinkTest
}
test("test result fetch timeout") {
- val exception = intercept[KyuubiSQLException](
+ interceptEquals[KyuubiSQLException](
withSessionConf()(Map(ENGINE_FLINK_FETCH_TIMEOUT.key -> "60000"))() {
withJdbcStatement("tbl_a") { stmt =>
stmt.executeQuery("create table tbl_a (a int) " +
@@ -1260,7 +1261,6 @@ abstract class FlinkOperationSuite extends HiveJDBCTestHelper with WithFlinkTest
val resultSet = stmt.executeQuery("select * from tbl_a")
while (resultSet.next()) {}
}
- })
- assert(exception.getMessage === "Futures timed out after [60000 milliseconds]")
+ })("Futures timed out after [60000 milliseconds]")
}
}
diff --git a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/PlanOnlyOperationSuite.scala b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/PlanOnlyOperationSuite.scala
index 17c49464fae..2b656466672 100644
--- a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/PlanOnlyOperationSuite.scala
+++ b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/PlanOnlyOperationSuite.scala
@@ -24,6 +24,7 @@ import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.flink.{WithDiscoveryFlinkSQLEngine, WithFlinkSQLEngineLocal}
import org.apache.kyuubi.ha.HighAvailabilityConf.{HA_ENGINE_REF_ID, HA_NAMESPACE}
import org.apache.kyuubi.operation.{AnalyzeMode, ExecutionMode, HiveJDBCTestHelper, ParseMode, PhysicalMode}
+import org.apache.kyuubi.util.AssertionUtils._
class PlanOnlyOperationSuite extends WithFlinkSQLEngineLocal
with HiveJDBCTestHelper with WithDiscoveryFlinkSQLEngine {
@@ -55,9 +56,8 @@ class PlanOnlyOperationSuite extends WithFlinkSQLEngineLocal
test("Plan only operation with session conf") {
withSessionConf()(Map(KyuubiConf.OPERATION_PLAN_ONLY_MODE.key -> AnalyzeMode.name))(Map.empty) {
withJdbcStatement() { statement =>
- val exceptionMsg = intercept[Exception](statement.executeQuery("select 1")).getMessage
- assert(exceptionMsg.contains(
- s"The operation mode ${AnalyzeMode.name} doesn't support in Flink SQL engine."))
+ interceptContains[Exception](statement.executeQuery("select 1"))(
+ s"The operation mode ${AnalyzeMode.name} doesn't support in Flink SQL engine.")
}
}
}
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/IndividualSparkSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/IndividualSparkSuite.scala
index 8fca1d0ca2b..4265c1063ea 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/IndividualSparkSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/IndividualSparkSuite.scala
@@ -31,6 +31,7 @@ import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf._
import org.apache.kyuubi.config.KyuubiReservedKeys._
import org.apache.kyuubi.operation.HiveJDBCTestHelper
+import org.apache.kyuubi.util.AssertionUtils._
class SparkEngineSuites extends KyuubiFunSuite {
@@ -63,10 +64,9 @@ class SparkEngineSuites extends KyuubiFunSuite {
try {
statement.setQueryTimeout(5)
forceCancel.set(force)
- val e1 = intercept[SQLTimeoutException] {
- statement.execute("select java_method('java.lang.Thread', 'sleep', 500000L)")
- }.getMessage
- assert(e1.contains("Query timed out"))
+ interceptContains[SQLTimeoutException](
+ statement.execute("select java_method('java.lang.Thread', 'sleep', 500000L)"))(
+ "Query timed out")
assert(index.get() != 0, "The query statement was not executed.")
eventually(Timeout(30.seconds)) {
if (forceCancel.get()) {
@@ -90,11 +90,10 @@ class SparkEngineSuites extends KyuubiFunSuite {
s"spark.${ENGINE_INIT_TIMEOUT.key}" -> String.valueOf(timeout))) {
SparkSQLEngine.setupConf()
SparkSQLEngine.currentEngine = None
- val e1 = intercept[KyuubiException] {
+ interceptStartsWith[KyuubiException] {
SparkSQLEngine.main(Array.empty)
- }.getMessage
+ }("The total engine initialization time")
assert(SparkSQLEngine.currentEngine.isEmpty)
- assert(e1.startsWith("The total engine initialization time"))
}
}
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelperSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelperSuite.scala
index 6bd0364f754..2b8c1f0414c 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelperSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelperSuite.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.types._
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.engine.spark.schema.SchemaHelper._
+import org.apache.kyuubi.util.AssertionUtils._
class SchemaHelperSuite extends KyuubiFunSuite {
@@ -70,10 +71,10 @@ class SchemaHelperSuite extends KyuubiFunSuite {
assert(toTTypeId(outerSchema(14).dataType) === TTypeId.MAP_TYPE)
assert(toTTypeId(outerSchema(15).dataType) === TTypeId.STRUCT_TYPE)
assert(toTTypeId(outerSchema(16).dataType) === TTypeId.STRING_TYPE)
- val e1 = intercept[IllegalArgumentException](toTTypeId(CharType(1)))
- assert(e1.getMessage === "Unrecognized type name: char(1)")
- val e2 = intercept[IllegalArgumentException](toTTypeId(VarcharType(1)))
- assert(e2.getMessage === "Unrecognized type name: varchar(1)")
+ interceptEquals[IllegalArgumentException](toTTypeId(CharType(1)))(
+ "Unrecognized type name: char(1)")
+ interceptEquals[IllegalArgumentException](toTTypeId(VarcharType(1)))(
+ "Unrecognized type name: varchar(1)")
}
test("toTTypeQualifiers") {
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/jdbc/KyuubiHiveDriverSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/jdbc/KyuubiHiveDriverSuite.scala
index ae68440df3e..368c5a4fb89 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/jdbc/KyuubiHiveDriverSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/jdbc/KyuubiHiveDriverSuite.scala
@@ -25,6 +25,7 @@ import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
import org.apache.kyuubi.engine.spark.util.SparkCatalogUtils
import org.apache.kyuubi.jdbc.hive.{KyuubiConnection, KyuubiStatement}
import org.apache.kyuubi.tags.IcebergTest
+import org.apache.kyuubi.util.AssertionUtils._
@IcebergTest
class KyuubiHiveDriverSuite extends WithSparkSQLEngine with IcebergSuiteMixin {
@@ -111,10 +112,9 @@ class KyuubiHiveDriverSuite extends WithSparkSQLEngine with IcebergSuiteMixin {
statement.setQueryTimeout(5)
try {
val code = """java.lang.Thread.sleep(500000L)"""
- val e = intercept[SQLTimeoutException] {
+ interceptContains[SQLTimeoutException] {
statement.executeScala(code)
- }.getMessage
- assert(e.contains("Query timed out"))
+ }("Query timed out")
} finally {
statement.close()
connection.close()
diff --git a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala
index dec753ad4f6..a12eb232130 100644
--- a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala
+++ b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala
@@ -18,6 +18,7 @@
package org.apache.kyuubi.engine.trino
import org.apache.kyuubi.KyuubiSQLException
+import org.apache.kyuubi.util.AssertionUtils._
class TrinoStatementSuite extends WithTrinoContainerServer {
@@ -61,8 +62,8 @@ class TrinoStatementSuite extends WithTrinoContainerServer {
test("test exception") {
withTrinoContainer { trinoContext =>
val trinoStatement = TrinoStatement(trinoContext, kyuubiConf, "use kyuubi")
- val e1 = intercept[KyuubiSQLException](trinoStatement.execute().toArray)
- assert(e1.getMessage.contains("Schema does not exist: tpch.kyuubi"))
+ interceptContains[KyuubiSQLException](trinoStatement.execute().toArray)(
+ "Schema does not exist: tpch.kyuubi")
}
}
}
diff --git a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala
index 90939a3e4e0..7027243bfb1 100644
--- a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala
+++ b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala
@@ -29,6 +29,7 @@ import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf._
import org.apache.kyuubi.engine.trino.{TrinoQueryTests, TrinoStatement, WithTrinoEngine}
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+import org.apache.kyuubi.util.AssertionUtils._
class TrinoOperationSuite extends WithTrinoEngine with TrinoQueryTests {
override def withKyuubiConf: Map[String, String] = Map(
@@ -551,11 +552,9 @@ class TrinoOperationSuite extends WithTrinoEngine with TrinoQueryTests {
test("trino - get functions") {
withJdbcStatement() { statement =>
- val exceptionMsg = intercept[Exception](statement.getConnection.getMetaData.getFunctions(
- null,
- null,
- "abs")).getMessage
- assert(exceptionMsg === KyuubiSQLException.featureNotSupported().getMessage)
+ interceptEquals[Exception] {
+ statement.getConnection.getMetaData.getFunctions(null, null, "abs")
+ }(KyuubiSQLException.featureNotSupported().getMessage)
}
}
diff --git a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/schema/SchemaHelperSuite.scala b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/schema/SchemaHelperSuite.scala
index 6f6bdc25fa4..cf9d8d0a873 100644
--- a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/schema/SchemaHelperSuite.scala
+++ b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/schema/SchemaHelperSuite.scala
@@ -27,6 +27,7 @@ import org.apache.hive.service.rpc.thrift.TTypeId
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.engine.trino.schema.SchemaHelper._
import org.apache.kyuubi.engine.trino.util.TestUtils._
+import org.apache.kyuubi.util.AssertionUtils._
class SchemaHelperSuite extends KyuubiFunSuite {
@@ -83,8 +84,8 @@ class SchemaHelperSuite extends KyuubiFunSuite {
assert(toTTypeId(outerSchema(21).getTypeSignature) === TTypeId.STRING_TYPE)
assert(toTTypeId(outerSchema(22).getTypeSignature) === TTypeId.NULL_TYPE)
- val e1 = intercept[IllegalArgumentException](toTTypeId(textTypeSignature))
- assert(e1.getMessage === "Unrecognized trino type name: text")
+ interceptEquals[IllegalArgumentException](toTTypeId(textTypeSignature))(
+ "Unrecognized trino type name: text")
}
test("toTTypeQualifiers") {
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
index 09532efe3d1..6977e0fd695 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
@@ -34,9 +34,9 @@ import org.apache.kyuubi.engine.spark.SparkProcessBuilder
import org.apache.kyuubi.kubernetes.test.MiniKube
import org.apache.kyuubi.operation.SparkQueryTests
import org.apache.kyuubi.session.KyuubiSessionManager
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.Validator.KUBERNETES_EXECUTOR_POD_NAME_PREFIX
import org.apache.kyuubi.zookeeper.ZookeeperConf.ZK_CLIENT_PORT_ADDRESS
-
abstract class SparkOnKubernetesSuiteBase
extends WithKyuubiServer with Logging with BatchTestHelper {
private val apiServerAddress = {
@@ -193,10 +193,10 @@ class KyuubiOperationKubernetesClusterClusterModeSuite
Seq("_123", "spark_exec", "spark@", "a" * 238).foreach { invalid =>
conf.set(KUBERNETES_EXECUTOR_POD_NAME_PREFIX, invalid)
val builder = new SparkProcessBuilder("test", conf)
- val e = intercept[KyuubiException](builder.validateConf)
- assert(e.getMessage === s"'$invalid' in spark.kubernetes.executor.podNamePrefix is" +
- s" invalid. must conform https://kubernetes.io/docs/concepts/overview/" +
- "working-with-objects/names/#dns-subdomain-names and the value length <= 237")
+ interceptEquals[KyuubiException](builder.validateConf)(
+ s"'$invalid' in spark.kubernetes.executor.podNamePrefix is" +
+ s" invalid. must conform https://kubernetes.io/docs/concepts/overview/" +
+ "working-with-objects/names/#dns-subdomain-names and the value length <= 237")
}
// clean test conf
conf.unset(KUBERNETES_EXECUTOR_POD_NAME_PREFIX)
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala
index 5973fc6e7a6..3c2129945ac 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/UtilsSuite.scala
@@ -29,6 +29,7 @@ import org.apache.hadoop.security.UserGroupInformation
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf.SERVER_SECRET_REDACTION_PATTERN
+import org.apache.kyuubi.util.AssertionUtils._
class UtilsSuite extends KyuubiFunSuite {
@@ -83,10 +84,9 @@ class UtilsSuite extends KyuubiFunSuite {
assert(props("kyuubi.yes") === "yes")
assert(!props.contains("kyuubi.no"))
- val e = intercept[KyuubiException] {
+ interceptContains[KyuubiException] {
Utils.getPropertiesFromFile(Some(new File("invalid-file")))
- }
- assert(e.getMessage contains "Failed when loading Kyuubi properties from")
+ }("Failed when loading Kyuubi properties from")
}
test("create directory") {
@@ -94,8 +94,8 @@ class UtilsSuite extends KyuubiFunSuite {
assert(Files.exists(path))
assert(path.getFileName.toString.startsWith("kyuubi-"))
path.toFile.deleteOnExit()
- val e = intercept[IOException](Utils.createDirectory("/"))
- assert(e.getMessage === "Failed to create a temp directory (under /) after 10 attempts!")
+ interceptEquals[IOException](Utils.createDirectory("/"))(
+ "Failed to create a temp directory (under /) after 10 attempts!")
val path1 = Utils.createDirectory(System.getProperty("java.io.tmpdir"), "kentyao")
assert(Files.exists(path1))
assert(path1.getFileName.toString.startsWith("kentyao-"))
@@ -144,12 +144,12 @@ class UtilsSuite extends KyuubiFunSuite {
assert(conf.getOption("k2").get == "v2")
val args1 = Array[String]("--conf", "k1=v1", "--conf")
- val exception1 = intercept[IllegalArgumentException](Utils.fromCommandLineArgs(args1, conf))
- assert(exception1.getMessage.contains("Illegal size of arguments"))
+ interceptContains[IllegalArgumentException](Utils.fromCommandLineArgs(args1, conf))(
+ "Illegal size of arguments")
val args2 = Array[String]("--conf", "k1=v1", "--conf", "a")
- val exception2 = intercept[IllegalArgumentException](Utils.fromCommandLineArgs(args2, conf))
- assert(exception2.getMessage.contains("Illegal argument: a"))
+ interceptContains[IllegalArgumentException](Utils.fromCommandLineArgs(args2, conf))(
+ "Illegal argument: a")
}
test("redact sensitive information in command line args") {
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigBuilderSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigBuilderSuite.scala
index 78429d27c9f..e0996c64ed2 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigBuilderSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigBuilderSuite.scala
@@ -55,8 +55,8 @@ class ConfigBuilderSuite extends KyuubiFunSuite {
KyuubiConf.register(booleanConf)
val kyuubiConf = KyuubiConf().set(booleanConf.key, "invalid conf")
- val e = intercept[IllegalArgumentException](kyuubiConf.get(booleanConf))
- assert(e.getMessage === "kyuubi.boolean.conf should be boolean, but was invalid conf")
+ interceptEquals[IllegalArgumentException](kyuubiConf.get(booleanConf))(
+ "kyuubi.boolean.conf should be boolean, but was invalid conf")
}
test("string config") {
@@ -110,8 +110,8 @@ class ConfigBuilderSuite extends KyuubiFunSuite {
assert(timeConf.defaultVal.get === 3)
val kyuubiConf = KyuubiConf().set(timeConf.key, "invalid")
KyuubiConf.register(timeConf)
- val e = intercept[IllegalArgumentException](kyuubiConf.get(timeConf))
- assert(e.getMessage startsWith "The formats accepted are 1) based on the ISO-8601")
+ interceptStartsWith[IllegalArgumentException](kyuubiConf.get(timeConf))(
+ "The formats accepted are 1) based on the ISO-8601")
}
test("invalid config") {
@@ -123,8 +123,8 @@ class ConfigBuilderSuite extends KyuubiFunSuite {
assert(intConf.defaultVal.get === 3)
val kyuubiConf = KyuubiConf().set(intConf.key, "-1")
KyuubiConf.register(intConf)
- val e = intercept[IllegalArgumentException](kyuubiConf.get(intConf))
- assert(e.getMessage equals "'-1' in kyuubi.invalid.config is invalid. must be positive integer")
+ interceptEquals[IllegalArgumentException](kyuubiConf.get(intConf))(
+ "'-1' in kyuubi.invalid.config is invalid. must be positive integer")
}
test("invalid config for enum") {
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigEntrySuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigEntrySuite.scala
index 94ad104af6a..aa790c134e2 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigEntrySuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/ConfigEntrySuite.scala
@@ -18,6 +18,7 @@
package org.apache.kyuubi.config
import org.apache.kyuubi.KyuubiFunSuite
+import org.apache.kyuubi.util.AssertionUtils._
class ConfigEntrySuite extends KyuubiFunSuite {
@@ -49,7 +50,7 @@ class ConfigEntrySuite extends KyuubiFunSuite {
KyuubiConf.register(e1)
val conf = KyuubiConf()
assert(conf.get(e1).isEmpty)
- val e = intercept[IllegalArgumentException](new OptionalConfigEntry[Int](
+ interceptEquals[IllegalArgumentException](new OptionalConfigEntry[Int](
"kyuubi.int.spark",
List.empty[String],
s => s.toInt + 1,
@@ -58,8 +59,7 @@ class ConfigEntrySuite extends KyuubiFunSuite {
"",
"int",
false,
- false))
- assert(e.getMessage ===
+ false))(
"requirement failed: Config entry kyuubi.int.spark already registered!")
conf.set(e1.key, "2")
assert(conf.get(e1) === Some(3))
@@ -182,10 +182,9 @@ class ConfigEntrySuite extends KyuubiFunSuite {
KyuubiConf.unregister(config)
- val exception = intercept[IllegalArgumentException](conf.get(config))
- assert(exception.getMessage.contains("requirement failed: " +
+ interceptContains[IllegalArgumentException](conf.get(config))("requirement failed: " +
"ConfigEntry(key=kyuubi.unregistered.spark, defaultValue=3.0, " +
- "doc=doc, version=, type=double) is not registered"))
+ "doc=doc, version=, type=double) is not registered")
}
test("support alternative keys in ConfigBuilder") {
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala
index 39e68f0ecfa..b03845383d6 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/config/KyuubiConfSuite.scala
@@ -20,6 +20,7 @@ package org.apache.kyuubi.config
import java.time.Duration
import org.apache.kyuubi.KyuubiFunSuite
+import org.apache.kyuubi.util.AssertionUtils._
class KyuubiConfSuite extends KyuubiFunSuite {
@@ -115,8 +116,7 @@ class KyuubiConfSuite extends KyuubiFunSuite {
kyuubiConf.set(OPERATION_IDLE_TIMEOUT.key, " 1000 ")
assert(kyuubiConf.get(OPERATION_IDLE_TIMEOUT) === 1000L)
kyuubiConf.set(OPERATION_IDLE_TIMEOUT.key, "1000A")
- val e = intercept[IllegalArgumentException](kyuubiConf.get(OPERATION_IDLE_TIMEOUT))
- assert(e.getMessage.contains("ISO-8601"))
+ interceptContains[IllegalArgumentException](kyuubiConf.get(OPERATION_IDLE_TIMEOUT))("ISO-8601")
kyuubiConf.set(OPERATION_IDLE_TIMEOUT.key, " P1DT2H3.2S ")
assert(kyuubiConf.get(OPERATION_IDLE_TIMEOUT) ===
@@ -137,8 +137,7 @@ class KyuubiConfSuite extends KyuubiFunSuite {
kyuubiConf.set(OPERATION_QUERY_TIMEOUT.key, " 1000 ")
assert(kyuubiConf.get(OPERATION_QUERY_TIMEOUT) === Some(1000L))
kyuubiConf.set(OPERATION_QUERY_TIMEOUT.key, "1000A")
- val e = intercept[IllegalArgumentException](kyuubiConf.get(OPERATION_QUERY_TIMEOUT))
- assert(e.getMessage.contains("ISO-8601"))
+ interceptContains[IllegalArgumentException](kyuubiConf.get(OPERATION_QUERY_TIMEOUT))("ISO-8601")
kyuubiConf.set(OPERATION_QUERY_TIMEOUT.key, " P1DT2H3.2S ")
assert(kyuubiConf.get(OPERATION_QUERY_TIMEOUT) ===
@@ -149,8 +148,9 @@ class KyuubiConfSuite extends KyuubiFunSuite {
.toMillis))
kyuubiConf.set(OPERATION_QUERY_TIMEOUT.key, "0")
- val e1 = intercept[IllegalArgumentException](kyuubiConf.get(OPERATION_QUERY_TIMEOUT))
- assert(e1.getMessage.contains("must >= 1s if set"))
+ interceptContains[IllegalArgumentException] {
+ kyuubiConf.get(OPERATION_QUERY_TIMEOUT)
+ }("must >= 1s if set")
}
test("kyuubi conf engine.share.level.subdomain valid path test") {
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/HiveMetadataTests.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/HiveMetadataTests.scala
index aad31d5b8d4..38764854a92 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/HiveMetadataTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/HiveMetadataTests.scala
@@ -23,6 +23,7 @@ import scala.util.Random
import org.apache.kyuubi.{KYUUBI_VERSION, KyuubiSQLException, Utils}
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
+import org.apache.kyuubi.util.AssertionUtils._
// For `hive` external catalog only
trait HiveMetadataTests extends SparkMetadataTests {
@@ -208,8 +209,7 @@ trait HiveMetadataTests extends SparkMetadataTests {
() => metaData.getFunctionColumns("", "%", "%", "%"),
() => metaData.getPseudoColumns("", "%", "%", "%"),
() => metaData.generatedKeyAlwaysReturned).foreach { func =>
- val e = intercept[SQLFeatureNotSupportedException](func())
- assert(e.getMessage === "Method not supported")
+ interceptEquals[SQLFeatureNotSupportedException](func())("Method not supported")
}
assert(metaData.allTablesAreSelectable)
@@ -260,16 +260,14 @@ trait HiveMetadataTests extends SparkMetadataTests {
assert(metaData.getDefaultTransactionIsolation === java.sql.Connection.TRANSACTION_NONE)
assert(!metaData.supportsTransactions)
assert(!metaData.getProcedureColumns("", "%", "%", "%").next())
- val e1 = intercept[SQLException] {
+ interceptContains[SQLException] {
metaData.getPrimaryKeys("", "default", "src").next()
- }
- assert(e1.getMessage.contains(KyuubiSQLException.featureNotSupported().getMessage))
+ }(KyuubiSQLException.featureNotSupported().getMessage)
assert(!metaData.getImportedKeys("", "default", "").next())
- val e2 = intercept[SQLException] {
+ interceptContains[SQLException] {
metaData.getCrossReference("", "default", "src", "", "default", "src2").next()
- }
- assert(e2.getMessage.contains(KyuubiSQLException.featureNotSupported().getMessage))
+ }(KyuubiSQLException.featureNotSupported().getMessage)
assert(!metaData.getIndexInfo("", "default", "src", true, true).next())
assert(metaData.supportsResultSetType(new Random().nextInt()))
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
index 0ac56e3bcf0..d20b7722373 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
@@ -28,6 +28,7 @@ import org.apache.hive.service.rpc.thrift.{TExecuteStatementReq, TFetchResultsRe
import org.apache.kyuubi.{KYUUBI_VERSION, Utils}
import org.apache.kyuubi.config.KyuubiConf
+import org.apache.kyuubi.util.AssertionUtils._
trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
@@ -135,10 +136,9 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
test("query time out shall respect client-side if no server-side control") {
withJdbcStatement() { statement =>
statement.setQueryTimeout(1)
- val e = intercept[SQLTimeoutException] {
+ interceptContains[SQLTimeoutException] {
statement.execute("select java_method('java.lang.Thread', 'sleep', 10000L)")
- }.getMessage
- assert(e.contains("Query timed out after"))
+ }("Query timed out after")
statement.setQueryTimeout(0)
val rs1 = statement.executeQuery(
@@ -333,8 +333,7 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
| .map {
| x => (x, x + 1, x * 2)
|""".stripMargin
- val e = intercept[SQLException](statement.executeQuery(incompleteCode))
- assert(e.getMessage contains "Incomplete code:")
+ interceptContains[SQLException](statement.executeQuery(incompleteCode))("Incomplete code:")
}
}
@@ -348,8 +347,7 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
| .range(0, 10, 2, 1)
| .map { x => (x, x + 1, y * 2) } // y is missing
|""".stripMargin
- val e = intercept[SQLException](statement.executeQuery(incompleteCode))
- assert(e.getMessage contains "not found: value y")
+ interceptContains[SQLException](statement.executeQuery(incompleteCode))("not found: value y")
}
}
@@ -428,8 +426,8 @@ trait SparkQueryTests extends SparkDataTypeTests with HiveJDBCTestHelper {
withSessionConf()(Map(KyuubiConf.OPERATION_LANGUAGE.key -> "SQL"))(Map.empty) {
withJdbcStatement() { statement =>
statement.executeQuery(s"set ${KyuubiConf.OPERATION_LANGUAGE.key}=AAA")
- val e = intercept[SQLException](statement.executeQuery("select 1"))
- assert(e.getMessage.contains("The operation language UNKNOWN doesn't support"))
+ interceptContains[SQLException](statement.executeQuery("select 1"))(
+ "The operation language UNKNOWN doesn't support")
statement.executeQuery(s"set ${KyuubiConf.OPERATION_LANGUAGE.key}=SQL")
val result = statement.executeQuery("select 1")
assert(result.next())
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/log/OperationLogSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/log/OperationLogSuite.scala
index 570a8159bcf..a00017a4cdf 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/log/OperationLogSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/log/OperationLogSuite.scala
@@ -29,6 +29,7 @@ import org.apache.kyuubi.{KyuubiFunSuite, KyuubiSQLException, Utils}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.operation.{FetchOrientation, OperationHandle}
import org.apache.kyuubi.session.NoopSessionManager
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.ThriftUtils
class OperationLogSuite extends KyuubiFunSuite {
@@ -159,8 +160,8 @@ class OperationLogSuite extends KyuubiFunSuite {
log1.write("some msg here \n")
log1.close()
log1.write("some msg here again")
- val e = intercept[KyuubiSQLException](log1.read(-1))
- assert(e.getMessage.contains(s"${sHandle.identifier}/${oHandle.identifier}"))
+ interceptContains[KyuubiSQLException](log1.read(-1))(
+ s"${sHandle.identifier}/${oHandle.identifier}")
}
test("test fail to init operation log root dir") {
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/ServerableSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/ServerableSuite.scala
index 817e309f02d..abf9207ec3d 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/ServerableSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/ServerableSuite.scala
@@ -19,6 +19,7 @@ package org.apache.kyuubi.service
import org.apache.kyuubi.{KyuubiException, KyuubiFunSuite}
import org.apache.kyuubi.config.KyuubiConf
+import org.apache.kyuubi.util.AssertionUtils._
class ServerableSuite extends KyuubiFunSuite {
@@ -44,9 +45,9 @@ class ServerableSuite extends KyuubiFunSuite {
test("invalid port") {
val conf = KyuubiConf().set(KyuubiConf.FRONTEND_THRIFT_BINARY_BIND_PORT, 100)
- val e = intercept[IllegalArgumentException](
- new NoopTBinaryFrontendServer().initialize(conf))
- assert(e.getMessage.contains("Invalid Port number"))
+ interceptContains[IllegalArgumentException] {
+ new NoopTBinaryFrontendServer().initialize(conf)
+ }("Invalid Port number")
}
test("error start child services") {
@@ -55,8 +56,7 @@ class ServerableSuite extends KyuubiFunSuite {
.set("kyuubi.test.server.should.fail", "true")
val server = new NoopTBinaryFrontendServer()
server.initialize(conf)
- val e = intercept[IllegalArgumentException](server.start())
- assert(e.getMessage === "should fail")
+ interceptEquals[IllegalArgumentException](server.start())("should fail")
conf
.set("kyuubi.test.server.should.fail", "false")
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/AuthenticationProviderFactorySuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/AuthenticationProviderFactorySuite.scala
index 0bd29ac56a9..711718b5ac3 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/AuthenticationProviderFactorySuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/AuthenticationProviderFactorySuite.scala
@@ -21,6 +21,7 @@ import javax.security.sasl.AuthenticationException
import org.apache.kyuubi.{KyuubiFunSuite, Utils}
import org.apache.kyuubi.config.KyuubiConf
+import org.apache.kyuubi.util.AssertionUtils._
class AuthenticationProviderFactorySuite extends KyuubiFunSuite {
@@ -31,11 +32,11 @@ class AuthenticationProviderFactorySuite extends KyuubiFunSuite {
val p1 = getAuthenticationProvider(AuthMethods.withName("NONE"), conf)
p1.authenticate(Utils.currentUser, "")
val p2 = getAuthenticationProvider(AuthMethods.withName("LDAP"), conf)
- val e1 = intercept[AuthenticationException](p2.authenticate("test", "test"))
- assert(e1.getMessage.contains("Error validating LDAP user:"))
- val e2 = intercept[AuthenticationException](
- AuthenticationProviderFactory.getAuthenticationProvider(null, conf))
- assert(e2.getMessage === "Not a valid authentication method")
+ interceptContains[AuthenticationException](
+ p2.authenticate("test", "test"))("Error validating LDAP user:")
+ interceptEquals[AuthenticationException](
+ AuthenticationProviderFactory.getAuthenticationProvider(null, conf))(
+ "Not a valid authentication method")
}
}
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/CustomAuthenticationProviderImplSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/CustomAuthenticationProviderImplSuite.scala
index 6135535e1d7..3cdd4d631e9 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/CustomAuthenticationProviderImplSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/CustomAuthenticationProviderImplSuite.scala
@@ -22,22 +22,22 @@ import javax.security.sasl.AuthenticationException
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.service.authentication.AuthenticationProviderFactory.getAuthenticationProvider
+import org.apache.kyuubi.util.AssertionUtils._
class CustomAuthenticationProviderImplSuite extends KyuubiFunSuite {
test("Test user defined authentication") {
val conf = KyuubiConf()
- val e1 = intercept[AuthenticationException](
- getAuthenticationProvider(AuthMethods.withName("CUSTOM"), conf))
- assert(e1.getMessage.contains(
- "authentication.custom.class must be set when auth method was CUSTOM."))
+ interceptContains[AuthenticationException](
+ getAuthenticationProvider(AuthMethods.withName("CUSTOM"), conf))(
+ "authentication.custom.class must be set when auth method was CUSTOM.")
conf.set(
KyuubiConf.AUTHENTICATION_CUSTOM_CLASS,
classOf[UserDefineAuthenticationProviderImpl].getCanonicalName)
val p1 = getAuthenticationProvider(AuthMethods.withName("CUSTOM"), conf)
- val e2 = intercept[AuthenticationException](p1.authenticate("test", "test"))
- assert(e2.getMessage.contains("Username or password is not valid!"))
+ interceptContains[AuthenticationException](p1.authenticate("test", "test"))(
+ "Username or password is not valid!")
p1.authenticate("user", "password")
}
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/JdbcAuthenticationProviderImplSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/JdbcAuthenticationProviderImplSuite.scala
index 4642eb910e6..dd32c1a208b 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/JdbcAuthenticationProviderImplSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/JdbcAuthenticationProviderImplSuite.scala
@@ -26,6 +26,7 @@ import com.zaxxer.hikari.util.DriverDataSource
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf._
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.JdbcUtils
class JdbcAuthenticationProviderImplSuite extends KyuubiFunSuite {
@@ -69,39 +70,36 @@ class JdbcAuthenticationProviderImplSuite extends KyuubiFunSuite {
val providerImpl = new JdbcAuthenticationProviderImpl(conf)
providerImpl.authenticate(authUser, authPasswd)
- val e1 = intercept[AuthenticationException] {
+ interceptContains[AuthenticationException] {
providerImpl.authenticate("", "")
- }
- assert(e1.getMessage.contains("user is null"))
+ }("user is null")
val wrong_password = "wrong_password"
- val e4 = intercept[AuthenticationException] {
+ interceptContains[AuthenticationException] {
providerImpl.authenticate(authUser, wrong_password)
- }
- assert(e4.isInstanceOf[AuthenticationException])
- assert(e4.getMessage.contains(s"Password does not match or no such user. " +
+ }(s"Password does not match or no such user. " +
s"user: $authUser, " +
- s"password: ${"*" * wrong_password.length}(length:${wrong_password.length})"))
+ s"password: ${"*" * wrong_password.length}(length:${wrong_password.length})")
var _conf = conf.clone
_conf.unset(AUTHENTICATION_JDBC_URL)
- val e5 = intercept[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }
- assert(e5.getMessage.contains("JDBC url is not configured"))
+ interceptContains[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }(
+ "JDBC url is not configured")
_conf = conf.clone
_conf.unset(AUTHENTICATION_JDBC_QUERY)
- val e8 = intercept[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }
- assert(e8.getMessage.contains("Query SQL is not configured"))
+ interceptContains[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }(
+ "Query SQL is not configured")
_conf.set(
AUTHENTICATION_JDBC_QUERY,
"INSERT INTO user_auth (user, password) VALUES ('demouser','demopassword');")
- val e9 = intercept[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }
- assert(e9.getMessage.contains("Query SQL must start with 'SELECT'"))
+ interceptContains[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }(
+ "Query SQL must start with 'SELECT'")
_conf.unset(AUTHENTICATION_JDBC_URL)
- val e10 = intercept[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }
- assert(e10.getMessage.contains("JDBC url is not configured"))
+ interceptContains[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }(
+ "JDBC url is not configured")
_conf = conf.clone
_conf.set(AUTHENTICATION_JDBC_QUERY, "SELECT 1 FROM user_auth")
@@ -117,9 +115,8 @@ class JdbcAuthenticationProviderImplSuite extends KyuubiFunSuite {
_conf.set(
AUTHENTICATION_JDBC_QUERY,
"SELECT 1 FROM user_auth WHERE user=${unsupported_placeholder} and username=${user}")
- val e11 = intercept[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }
- assert(e11.getMessage.contains(
- "Unsupported placeholder in Query SQL: ${unsupported_placeholder}"))
+ interceptContains[IllegalArgumentException] { new JdbcAuthenticationProviderImpl(_conf) }(
+ "Unsupported placeholder in Query SQL: ${unsupported_placeholder}")
// unknown field
_conf.set(
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala
index 316c9b2dfdf..5e8df52a8cd 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/KyuubiAuthenticationFactorySuite.scala
@@ -35,10 +35,9 @@ class KyuubiAuthenticationFactorySuite extends KyuubiFunSuite {
val kyuubiConf = KyuubiConf()
val hadoopConf = KyuubiHadoopUtils.newHadoopConf(kyuubiConf)
- val e1 = intercept[KyuubiSQLException] {
+ interceptEquals[KyuubiSQLException] {
verifyProxyAccess("kent", "yao", "localhost", hadoopConf)
- }
- assert(e1.getMessage === "Failed to validate proxy privilege of kent for yao")
+ }("Failed to validate proxy privilege of kent for yao")
kyuubiConf.set("hadoop.proxyuser.kent.groups", "*")
kyuubiConf.set("hadoop.proxyuser.kent.hosts", "*")
@@ -74,8 +73,8 @@ class KyuubiAuthenticationFactorySuite extends KyuubiFunSuite {
val conf = KyuubiConf().set(KyuubiConf.AUTHENTICATION_METHOD, Set("KERBEROS"))
val factory = new KyuubiAuthenticationFactory(conf)
- val e = intercept[LoginException](factory.getTTransportFactory)
- assert(e.getMessage startsWith "Kerberos principal should have 3 parts")
+ interceptStartsWith[LoginException](factory.getTTransportFactory)(
+ "Kerberos principal should have 3 parts")
}
test("AuthType is NOSASL if only NOSASL is specified") {
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/LdapAuthenticationProviderImplSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/LdapAuthenticationProviderImplSuite.scala
index f10bf7ce2df..7a9edc93ba2 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/LdapAuthenticationProviderImplSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/LdapAuthenticationProviderImplSuite.scala
@@ -28,6 +28,7 @@ import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf._
import org.apache.kyuubi.service.authentication.ldap.{DirSearch, DirSearchFactory, LdapSearchFactory}
import org.apache.kyuubi.service.authentication.ldap.LdapUtils.getUserName
+import org.apache.kyuubi.util.AssertionUtils._
class LdapAuthenticationProviderImplSuite extends WithLdapServer {
@@ -49,10 +50,8 @@ class LdapAuthenticationProviderImplSuite extends WithLdapServer {
test("authenticateGivenBlankOrNullPassword") {
Seq("", "\u0000", null).foreach { pwd =>
auth = new LdapAuthenticationProviderImpl(conf, new LdapSearchFactory)
- val thrown = intercept[AuthenticationException] {
- auth.authenticate("user", pwd)
- }
- assert(thrown.getMessage.contains("is null or contains blank space"))
+ interceptContains[AuthenticationException] { auth.authenticate("user", pwd) }(
+ "is null or contains blank space")
}
}
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLHelperSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLHelperSuite.scala
index d4290a2c6dd..1e2f8ac89d8 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLHelperSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLHelperSuite.scala
@@ -25,6 +25,7 @@ import org.apache.kyuubi.{KYUUBI_VERSION, KyuubiFunSuite}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.service.{NoopTBinaryFrontendServer, TBinaryFrontendService}
import org.apache.kyuubi.service.authentication.PlainSASLServer.SaslPlainProvider
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.SemanticVersion
class PlainSASLHelperSuite extends KyuubiFunSuite {
@@ -39,19 +40,16 @@ class PlainSASLHelperSuite extends KyuubiFunSuite {
val tProcessor = tProcessorFactory.getProcessor(tSocket)
assert(tProcessor.isInstanceOf[TSetIpAddressProcessor[_]])
- val e = intercept[IllegalArgumentException] {
+ interceptEquals[IllegalArgumentException] {
PlainSASLHelper.getTransportFactory("KERBEROS", conf)
- }
- assert(e.getMessage === "Illegal authentication type KERBEROS for plain transport")
- val e2 = intercept[IllegalArgumentException] {
+ }("Illegal authentication type KERBEROS for plain transport")
+ interceptEquals[IllegalArgumentException] {
PlainSASLHelper.getTransportFactory("NOSASL", conf)
- }
- assert(e2.getMessage === "Illegal authentication type NOSASL for plain transport")
+ }("Illegal authentication type NOSASL for plain transport")
- val e3 = intercept[IllegalArgumentException] {
+ interceptEquals[IllegalArgumentException] {
PlainSASLHelper.getTransportFactory("ELSE", conf)
- }
- assert(e3.getMessage === "Illegal authentication type ELSE for plain transport")
+ }("Illegal authentication type ELSE for plain transport")
val tTransportFactory = PlainSASLHelper.getTransportFactory("NONE", conf)
assert(tTransportFactory.isInstanceOf[TSaslServerTransport.Factory])
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/SaslQOPSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/SaslQOPSuite.scala
index 6cf2793d244..912acf7cc5b 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/SaslQOPSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/SaslQOPSuite.scala
@@ -20,6 +20,7 @@ package org.apache.kyuubi.service.authentication
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf.SASL_QOP
+import org.apache.kyuubi.util.AssertionUtils._
class SaslQOPSuite extends KyuubiFunSuite {
@@ -31,10 +32,9 @@ class SaslQOPSuite extends KyuubiFunSuite {
assert(SaslQOP.withName(conf.get(SASL_QOP)) === q)
}
conf.set(SASL_QOP, "abc")
- val e = intercept[IllegalArgumentException](conf.get(SASL_QOP))
- assert(e.getMessage ===
+ interceptEquals[IllegalArgumentException] { conf.get(SASL_QOP) }(
"The value of kyuubi.authentication.sasl.qop should be one of" +
- " auth, auth-int, auth-conf, but was abc")
+ " auth, auth-int, auth-conf, but was abc")
}
}
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/util/SignUtilsSuite.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/util/SignUtilsSuite.scala
index 90863196bd1..0a8c4a23d9a 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/util/SignUtilsSuite.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/util/SignUtilsSuite.scala
@@ -20,6 +20,7 @@ import java.security.InvalidParameterException
import java.util.Base64
import org.apache.kyuubi.KyuubiFunSuite
+import org.apache.kyuubi.util.AssertionUtils._
class SignUtilsSuite extends KyuubiFunSuite {
test("generate key pair") {
@@ -28,8 +29,8 @@ class SignUtilsSuite extends KyuubiFunSuite {
assert(publicKey !== null)
val invalidAlgorithm = "invalidAlgorithm"
- val e1 = intercept[InvalidParameterException](SignUtils.generateKeyPair(invalidAlgorithm))
- assert(e1.getMessage == s"algorithm $invalidAlgorithm not supported for key pair generation")
+ interceptEquals[InvalidParameterException](SignUtils.generateKeyPair(invalidAlgorithm))(
+ s"algorithm $invalidAlgorithm not supported for key pair generation")
}
test("sign/verify with key pair") {
diff --git a/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala b/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala
index 9caf3864640..0ac63a79d23 100644
--- a/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala
+++ b/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala
@@ -26,6 +26,7 @@ import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.ha.HighAvailabilityConf.{HA_ADDRESSES, HA_NAMESPACE}
import org.apache.kyuubi.ha.client.DiscoveryClientProvider.withDiscoveryClient
import org.apache.kyuubi.service._
+import org.apache.kyuubi.util.AssertionUtils._
trait DiscoveryClientTests extends KyuubiFunSuite {
@@ -144,10 +145,9 @@ trait DiscoveryClientTests extends KyuubiFunSuite {
withDiscoveryClient(conf) { discoveryClient =>
assert(lockLatch.await(20000, TimeUnit.MILLISECONDS))
- val e = intercept[KyuubiSQLException] {
+ interceptContains[KyuubiSQLException] {
discoveryClient.tryWithLock(lockPath, 5000) {}
- }
- assert(e.getMessage contains s"Timeout to lock on path [$lockPath]")
+ }(s"Timeout to lock on path [$lockPath]")
}
}
diff --git a/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/zookeeper/ZookeeperDiscoveryClientSuite.scala b/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/zookeeper/ZookeeperDiscoveryClientSuite.scala
index dd78e1fb8a0..9aabe260b58 100644
--- a/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/zookeeper/ZookeeperDiscoveryClientSuite.scala
+++ b/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/zookeeper/ZookeeperDiscoveryClientSuite.scala
@@ -39,6 +39,7 @@ import org.apache.kyuubi.shaded.curator.framework.CuratorFrameworkFactory
import org.apache.kyuubi.shaded.curator.retry.ExponentialBackoffRetry
import org.apache.kyuubi.shaded.zookeeper.ZooDefs
import org.apache.kyuubi.shaded.zookeeper.data.ACL
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.reflect.ReflectUtils._
import org.apache.kyuubi.zookeeper.EmbeddedZookeeper
import org.apache.kyuubi.zookeeper.ZookeeperConf.ZK_CLIENT_PORT
@@ -131,9 +132,8 @@ abstract class ZookeeperDiscoveryClientSuite extends DiscoveryClientTests
assert(options("useKeyTab").toString.toBoolean)
conf.set(HA_ZK_AUTH_KEYTAB.key, s"${keytab.getName}")
- val e = intercept[IOException](setUpZooKeeperAuth(conf))
- assert(
- e.getMessage === s"${HA_ZK_AUTH_KEYTAB.key}: ${getKeyTabFile(conf).get} does not exists")
+ interceptEquals[IOException](setUpZooKeeperAuth(conf))(
+ s"${HA_ZK_AUTH_KEYTAB.key}: ${getKeyTabFile(conf).get} does not exists")
}
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala
index 012f4df1608..c9c662a4db9 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/WithKyuubiServerOnYarn.scala
@@ -32,6 +32,7 @@ import org.apache.kyuubi.operation.{FetchOrientation, HiveJDBCTestHelper, Operat
import org.apache.kyuubi.operation.OperationState.ERROR
import org.apache.kyuubi.server.MiniYarnService
import org.apache.kyuubi.session.{KyuubiBatchSession, KyuubiSessionManager}
+import org.apache.kyuubi.util.AssertionUtils._
/**
* To developers:
@@ -201,12 +202,11 @@ class KyuubiOperationYarnClusterSuite extends WithKyuubiServerOnYarn with HiveJD
ENGINE_INIT_TIMEOUT.key -> "PT10M",
KYUUBI_BATCH_ID_KEY -> UUID.randomUUID().toString))(Map.empty) {
val startTime = System.currentTimeMillis()
- val exception = intercept[Exception] {
+ interceptContains[Exception] {
withJdbcStatement() { _ => }
- }
+ }("The engine application has been terminated.")
val elapsedTime = System.currentTimeMillis() - startTime
assert(elapsedTime < 60 * 1000)
- assert(exception.getMessage contains "The engine application has been terminated.")
}
}
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/KyuubiApplicationManagerSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/KyuubiApplicationManagerSuite.scala
index 0f54520fc77..b8281745c67 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/KyuubiApplicationManagerSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/KyuubiApplicationManagerSuite.scala
@@ -20,6 +20,7 @@ package org.apache.kyuubi.engine
import org.apache.kyuubi.{KyuubiException, KyuubiFunSuite}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.KubernetesApplicationOperation.LABEL_KYUUBI_UNIQUE_KEY
+import org.apache.kyuubi.util.AssertionUtils._
class KyuubiApplicationManagerSuite extends KyuubiFunSuite {
test("application access path") {
@@ -32,10 +33,9 @@ class KyuubiApplicationManagerSuite extends KyuubiFunSuite {
KyuubiApplicationManager.checkApplicationAccessPath(path, noLocalDirLimitConf)
path = "/apache/kyuubijar"
- var e = intercept[KyuubiException] {
+ interceptContains[KyuubiException] {
KyuubiApplicationManager.checkApplicationAccessPath(path, localDirLimitConf)
- }
- assert(e.getMessage.contains("is not in the local dir allow list"))
+ }("is not in the local dir allow list")
KyuubiApplicationManager.checkApplicationAccessPath(path, noLocalDirLimitConf)
path = "hdfs:/apache/kyuubijar"
@@ -43,10 +43,9 @@ class KyuubiApplicationManagerSuite extends KyuubiFunSuite {
KyuubiApplicationManager.checkApplicationAccessPath(path, noLocalDirLimitConf)
path = "path/to/kyuubijar"
- e = intercept[KyuubiException] {
+ interceptContains[KyuubiException] {
KyuubiApplicationManager.checkApplicationAccessPath(path, localDirLimitConf)
- }
- assert(e.getMessage.contains("please use absolute path"))
+ }("please use absolute path")
KyuubiApplicationManager.checkApplicationAccessPath(path, noLocalDirLimitConf)
var appConf = Map("spark.files" -> "/apache/kyuubi/jars/a.jar")
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala
index f85e363d39e..b6c531acb62 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/jdbc/JdbcProcessBuilderSuite.scala
@@ -19,6 +19,7 @@ package org.apache.kyuubi.engine.jdbc
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf.{ENGINE_JDBC_CONNECTION_PASSWORD, ENGINE_JDBC_CONNECTION_URL, ENGINE_JDBC_EXTRA_CLASSPATH, ENGINE_JDBC_JAVA_OPTIONS, ENGINE_JDBC_MEMORY}
+import org.apache.kyuubi.util.AssertionUtils._
class JdbcProcessBuilderSuite extends KyuubiFunSuite {
@@ -37,10 +38,9 @@ class JdbcProcessBuilderSuite extends KyuubiFunSuite {
}
test("capture error from jdbc process builder") {
- val e1 = intercept[IllegalArgumentException](
- new JdbcProcessBuilder("kyuubi", KyuubiConf()).processBuilder)
- assert(e1.getMessage contains
- s"Jdbc server url can not be null! Please set ${ENGINE_JDBC_CONNECTION_URL.key}")
+ interceptContains[IllegalArgumentException] {
+ new JdbcProcessBuilder("kyuubi", KyuubiConf()).processBuilder
+ }(s"Jdbc server url can not be null! Please set ${ENGINE_JDBC_CONNECTION_URL.key}")
}
test("default engine memory") {
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
index 16a7f728ea6..7f39e5c976c 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
@@ -30,6 +30,7 @@ import org.apache.kyuubi.jdbc.KyuubiHiveDriver
import org.apache.kyuubi.jdbc.hive.{KyuubiSQLException, KyuubiStatement}
import org.apache.kyuubi.operation.HiveJDBCTestHelper
import org.apache.kyuubi.tags.PySparkTest
+import org.apache.kyuubi.util.AssertionUtils._
@PySparkTest
class PySparkTests extends WithKyuubiServer with HiveJDBCTestHelper {
@@ -75,13 +76,11 @@ class PySparkTests extends WithKyuubiServer with HiveJDBCTestHelper {
statement.setQueryTimeout(5)
try {
var code = "spark.sql(\"select java_method('java.lang.Thread', 'sleep', 10000L)\").show()"
- var e = intercept[SQLTimeoutException] {
+ interceptContains[SQLTimeoutException] {
statement.executePython(code)
- }.getMessage
- assert(e.contains("Query timed out"))
+ }("Query timed out")
code = "bad_code"
- e = intercept[KyuubiSQLException](statement.executePython(code)).getMessage
- assert(e.contains("Interpret error"))
+ interceptContains[KyuubiSQLException] { statement.executePython(code) }("Interpret error")
} finally {
statement.close()
connection.close()
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala
index 2c37c41bc4b..aa0e8e5c1eb 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/trino/TrinoProcessBuilderSuite.scala
@@ -21,6 +21,7 @@ import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf._
import org.apache.kyuubi.config.KyuubiReservedKeys.KYUUBI_SESSION_USER_KEY
+import org.apache.kyuubi.util.AssertionUtils._
class TrinoProcessBuilderSuite extends KyuubiFunSuite {
@@ -38,10 +39,9 @@ class TrinoProcessBuilderSuite extends KyuubiFunSuite {
}
test("capture error from trino process builder") {
- val e1 = intercept[IllegalArgumentException](
- new TrinoProcessBuilder("kyuubi", KyuubiConf()).processBuilder)
- assert(e1.getMessage contains
- s"Trino server url can not be null! Please set ${ENGINE_TRINO_CONNECTION_URL.key}")
+ interceptContains[IllegalArgumentException] {
+ new TrinoProcessBuilder("kyuubi", KyuubiConf()).processBuilder
+ }(s"Trino server url can not be null! Please set ${ENGINE_TRINO_CONNECTION_URL.key}")
}
test("default engine memory") {
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationManagerSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationManagerSuite.scala
index 1bd750e6aed..cdf01383948 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationManagerSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationManagerSuite.scala
@@ -22,6 +22,7 @@ import java.sql.SQLTimeoutException
import org.apache.kyuubi.WithKyuubiServer
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf._
+import org.apache.kyuubi.util.AssertionUtils._
class KyuubiOperationManagerSuite extends WithKyuubiServer with HiveJDBCTestHelper {
override protected val conf: KyuubiConf = {
@@ -52,10 +53,9 @@ class KyuubiOperationManagerSuite extends WithKyuubiServer with HiveJDBCTestHelp
withJdbcStatement() { statement =>
Range(-1, 20, 5).foreach { clientTimeout =>
statement.setQueryTimeout(clientTimeout)
- val e = intercept[SQLTimeoutException] {
+ interceptContains[SQLTimeoutException] {
statement.executeQuery("select java_method('java.lang.Thread', 'sleep', 10000L)")
- }.getMessage
- assert(e.contains("Query timed out after"))
+ }("Query timed out after")
}
}
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationPerConnectionSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationPerConnectionSuite.scala
index 97ab21998b9..2504ce2649d 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationPerConnectionSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/KyuubiOperationPerConnectionSuite.scala
@@ -35,6 +35,7 @@ import org.apache.kyuubi.jdbc.hive.{KyuubiConnection, KyuubiSQLException}
import org.apache.kyuubi.metrics.{MetricsConstants, MetricsSystem}
import org.apache.kyuubi.plugin.SessionConfAdvisor
import org.apache.kyuubi.session.{KyuubiSessionImpl, KyuubiSessionManager, SessionHandle, SessionType}
+import org.apache.kyuubi.util.AssertionUtils._
/**
* UT with Connection level engine shared cost much time, only run basic jdbc tests.
@@ -291,8 +292,7 @@ class KyuubiOperationPerConnectionSuite extends WithKyuubiServer with HiveJDBCTe
statement.executeQuery(s"set ${KyuubiConf.OPERATION_INCREMENTAL_COLLECT.key}=true;")
val resultSet = statement.executeQuery(
"SELECT raise_error('client should catch this exception');")
- val e = intercept[KyuubiSQLException](resultSet.next())
- assert(e.getMessage.contains("client should catch this exception"))
+ interceptContains[KyuubiSQLException](resultSet.next())("client should catch this exception")
}
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala
index 8773440a686..9d9966d2e71 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/PlanOnlyOperationSuite.scala
@@ -22,6 +22,7 @@ import java.sql.Statement
import org.apache.kyuubi.WithKyuubiServer
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.jdbc.hive.KyuubiSQLException
+import org.apache.kyuubi.util.AssertionUtils._
class PlanOnlyOperationSuite extends WithKyuubiServer with HiveJDBCTestHelper {
@@ -191,8 +192,8 @@ class PlanOnlyOperationSuite extends WithKyuubiServer with HiveJDBCTestHelper {
withSessionConf()(Map(KyuubiConf.OPERATION_PLAN_ONLY_MODE.key -> "parse"))(Map.empty) {
withJdbcStatement() { statement =>
statement.executeQuery(s"set ${KyuubiConf.OPERATION_PLAN_ONLY_MODE.key}=parser")
- val e = intercept[KyuubiSQLException](statement.executeQuery("select 1"))
- assert(e.getMessage.contains("Unknown planOnly mode: parser"))
+ interceptContains[KyuubiSQLException](statement.executeQuery("select 1"))(
+ "Unknown planOnly mode: parser")
statement.executeQuery(s"set ${KyuubiConf.OPERATION_PLAN_ONLY_MODE.key}=parse")
val result = statement.executeQuery("select 1")
assert(result.next())
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/plugin/PluginLoaderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/plugin/PluginLoaderSuite.scala
index e24b79c2cb5..e0d6dc9b747 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/plugin/PluginLoaderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/plugin/PluginLoaderSuite.scala
@@ -22,6 +22,7 @@ import scala.collection.JavaConverters._
import org.apache.kyuubi.{KyuubiException, KyuubiFunSuite}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.session.{FileSessionConfAdvisor, HadoopGroupProvider}
+import org.apache.kyuubi.util.AssertionUtils._
class PluginLoaderSuite extends KyuubiFunSuite {
@@ -30,16 +31,14 @@ class PluginLoaderSuite extends KyuubiFunSuite {
assert(PluginLoader.loadSessionConfAdvisor(conf).isInstanceOf[DefaultSessionConfAdvisor])
conf.set(KyuubiConf.SESSION_CONF_ADVISOR, classOf[InvalidSessionConfAdvisor].getName)
- val msg1 = intercept[KyuubiException] {
+ interceptContains[KyuubiException] {
PluginLoader.loadSessionConfAdvisor(conf)
- }.getMessage
- assert(msg1.contains(s"is not a child of '${classOf[SessionConfAdvisor].getName}'"))
+ }(s"is not a child of '${classOf[SessionConfAdvisor].getName}'")
conf.set(KyuubiConf.SESSION_CONF_ADVISOR, "non.exists")
- val msg2 = intercept[IllegalArgumentException] {
+ interceptStartsWith[IllegalArgumentException] {
PluginLoader.loadSessionConfAdvisor(conf)
- }.getMessage
- assert(msg2.startsWith("Error while instantiating 'non.exists'"))
+ }("Error while instantiating 'non.exists'")
}
test("FileSessionConfAdvisor") {
@@ -74,16 +73,14 @@ class PluginLoaderSuite extends KyuubiFunSuite {
assert(PluginLoader.loadGroupProvider(conf).isInstanceOf[HadoopGroupProvider])
conf.set(KyuubiConf.GROUP_PROVIDER, classOf[InvalidGroupProvider].getName)
- val msg1 = intercept[KyuubiException] {
+ interceptContains[KyuubiException] {
PluginLoader.loadGroupProvider(conf)
- }.getMessage
- assert(msg1.contains(s"is not a child of '${classOf[GroupProvider].getName}'"))
+ }(s"is not a child of '${classOf[GroupProvider].getName}'")
conf.set(KyuubiConf.GROUP_PROVIDER, "non.exists")
- val msg2 = intercept[IllegalArgumentException] {
+ interceptStartsWith[IllegalArgumentException] {
PluginLoader.loadGroupProvider(conf)
- }.getMessage
- assert(msg2.startsWith("Error while instantiating 'non.exists'"))
+ }("Error while instantiating 'non.exists'")
}
test("HadoopGroupProvider") {
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KinitAuxiliaryServiceSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KinitAuxiliaryServiceSuite.scala
index 96aa269410d..eb1efbf5033 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KinitAuxiliaryServiceSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KinitAuxiliaryServiceSuite.scala
@@ -22,6 +22,7 @@ import org.apache.hadoop.security.UserGroupInformation
import org.apache.kyuubi.KerberizedTestHelper
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.service.ServiceState
+import org.apache.kyuubi.util.AssertionUtils._
class KinitAuxiliaryServiceSuite extends KerberizedTestHelper {
@@ -40,8 +41,8 @@ class KinitAuxiliaryServiceSuite extends KerberizedTestHelper {
tryWithSecurityEnabled {
val service = new KinitAuxiliaryService()
val conf = KyuubiConf()
- val e = intercept[IllegalArgumentException](service.initialize(conf))
- assert(e.getMessage === "requirement failed: principal or keytab is missing")
+ interceptEquals[IllegalArgumentException](service.initialize(conf))(
+ "requirement failed: principal or keytab is missing")
conf.set(KyuubiConf.SERVER_PRINCIPAL, testPrincipal)
.set(KyuubiConf.SERVER_KEYTAB, testKeytab)
.set(KyuubiConf.KINIT_INTERVAL, 0L)
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiMySQLFrontendServiceSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiMySQLFrontendServiceSuite.scala
index 4bf8b8eda55..7db1d381fab 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiMySQLFrontendServiceSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiMySQLFrontendServiceSuite.scala
@@ -22,6 +22,7 @@ import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.config.KyuubiConf.{FRONTEND_ADVERTISED_HOST, FRONTEND_MYSQL_BIND_HOST, FRONTEND_MYSQL_BIND_PORT}
import org.apache.kyuubi.service.NoopMySQLFrontendServer
import org.apache.kyuubi.service.ServiceState._
+import org.apache.kyuubi.util.AssertionUtils._
class KyuubiMySQLFrontendServiceSuite extends KyuubiFunSuite {
@@ -31,8 +32,8 @@ class KyuubiMySQLFrontendServiceSuite extends KyuubiFunSuite {
val conf = KyuubiConf()
assert(server.getServices.isEmpty)
assert(server.getServiceState === LATENT)
- val e = intercept[IllegalStateException](server.frontendServices.head.connectionUrl)
- assert(e.getMessage startsWith "Illegal Service State: LATENT")
+ interceptStartsWith[IllegalStateException] { server.frontendServices.head.connectionUrl }(
+ "Illegal Service State: LATENT")
assert(server.getConf === null)
server.initialize(conf)
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiServerSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiServerSuite.scala
index c8a7c9a3de8..b67f02a2f76 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiServerSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/KyuubiServerSuite.scala
@@ -20,6 +20,7 @@ package org.apache.kyuubi.server
import org.apache.kyuubi.KyuubiFunSuite
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.service.ServiceState._
+import org.apache.kyuubi.util.AssertionUtils._
class KyuubiServerSuite extends KyuubiFunSuite {
@@ -60,7 +61,7 @@ class KyuubiServerSuite extends KyuubiFunSuite {
test("invalid port") {
val conf = KyuubiConf().set(KyuubiConf.FRONTEND_THRIFT_BINARY_BIND_PORT, 100)
- val e = intercept[IllegalArgumentException](new KyuubiServer().initialize(conf))
- assert(e.getMessage contains "Invalid Port number")
+ interceptContains[IllegalArgumentException](new KyuubiServer().initialize(conf))(
+ "Invalid Port number")
}
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala
index 1ace5861264..4068a728bcc 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala
@@ -29,6 +29,7 @@ import org.apache.kyuubi.{KyuubiFunSuite, KyuubiSQLException, TrinoRestFrontendT
import org.apache.kyuubi.server.trino.api.{Query, TrinoContext}
import org.apache.kyuubi.server.trino.api.v1.dto.Ok
import org.apache.kyuubi.session.SessionHandle
+import org.apache.kyuubi.util.AssertionUtils._
class StatementResourceSuite extends KyuubiFunSuite with TrinoRestFrontendTestHelper {
@@ -88,8 +89,8 @@ class StatementResourceSuite extends KyuubiFunSuite with TrinoRestFrontendTestHe
s"${Query.KYUUBI_SESSION_ID}=${TrinoContext.urlEncode(sessionHandle.identifier.toString)}")
.delete()
assert(nextResponse.getStatus == 204)
- val exception = intercept[KyuubiSQLException](sessionManager.getSession(sessionHandle))
- assert(exception.getMessage === s"Invalid $sessionHandle")
+ interceptEquals[KyuubiSQLException](sessionManager.getSession(sessionHandle))(
+ s"Invalid $sessionHandle")
}
private def getData(current: TrinoResponse): TrinoResponse = {
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/ui/JettyUtilsSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/ui/JettyUtilsSuite.scala
index 1f5bdd16a7f..796a0fcc03a 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/ui/JettyUtilsSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/ui/JettyUtilsSuite.scala
@@ -18,6 +18,7 @@
package org.apache.kyuubi.server.ui
import org.apache.kyuubi.{KyuubiException, KyuubiFunSuite}
+import org.apache.kyuubi.util.AssertionUtils._
class JettyUtilsSuite extends KyuubiFunSuite {
@@ -25,8 +26,8 @@ class JettyUtilsSuite extends KyuubiFunSuite {
val contextPath = "/static"
val handler = JettyUtils.createStaticHandler("org/apache/kyuubi/ui/static", contextPath)
assert(handler.getContextPath === contextPath)
- val e = intercept[KyuubiException](JettyUtils
- .createStaticHandler("org/apache/kyuubi/ui/static/nonexists", contextPath))
- assert(e.getMessage.startsWith("Could not find resource path"))
+ interceptStartsWith[KyuubiException] {
+ JettyUtils.createStaticHandler("org/apache/kyuubi/ui/static/nonexists", contextPath)
+ }("Could not find resource path")
}
}
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionLimiterSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionLimiterSuite.scala
index 775239f9b09..e6640af4959 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionLimiterSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/session/SessionLimiterSuite.scala
@@ -23,6 +23,7 @@ import scala.collection.JavaConverters._
import scala.util.Random
import org.apache.kyuubi.{KyuubiFunSuite, KyuubiSQLException}
+import org.apache.kyuubi.util.AssertionUtils._
import org.apache.kyuubi.util.ThreadUtils
class SessionLimiterSuite extends KyuubiFunSuite {
@@ -143,13 +144,10 @@ class SessionLimiterSuite extends KyuubiFunSuite {
limiter.asInstanceOf[SessionLimiterImpl].counters().asScala.values
.foreach(c => assert(c.get() == 0))
- val caught = intercept[KyuubiSQLException] {
+ interceptEquals[KyuubiSQLException] {
val userIpAddress = UserIpAddress("user002", ipAddress)
limiter.increment(userIpAddress)
- }
-
- assert(caught.getMessage.equals(
- "Connection denied because the user is in the deny user list. (user: user002)"))
+ }("Connection denied because the user is in the deny user list. (user: user002)")
}
test("test refresh unlimited users and deny users") {
diff --git a/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala b/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala
index 9d33993b9d2..029297f9ce4 100644
--- a/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala
+++ b/kyuubi-util-scala/src/test/scala/org/apache/kyuubi/util/AssertionUtils.scala
@@ -155,9 +155,40 @@ object AssertionUtils {
*/
def interceptContains[T <: Exception](f: => Any)(contained: String)(implicit
classTag: ClassTag[T],
+ prettifier: Prettifier,
pos: Position): Unit = {
assert(contained != null)
val exception = intercept[T](f)(classTag, pos)
- assert(exception.getMessage.contains(contained))
+ val exceptionMessage = exception.getMessage
+ withClue(s"'$exceptionMessage' expected containing '$contained'") {
+ assert(exception.getMessage.contains(contained))(prettifier, pos)
+ }
+ }
+
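+ /**
+ * Asserts that the given function throws an exception of the given type
+ * and with the exception message containing at least one of the candidate strings
+ */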
+ def interceptContainsAny[T <: Exception](f: => Any)(contained: String*)(implicit
+ classTag: ClassTag[T],
+ prettifier: Prettifier,
+ pos: Position): Unit = {
+ val exception = intercept[T](f)(classTag, pos)
+ val exceptionMessage = exception.getMessage
+ withClue(s"'$exceptionMessage' expected to contain any of [${contained.mkString(",")}]") {
+ assert(contained.exists(exceptionMessage.contains(_)))(prettifier, pos)
+ }
+ }
+
+ /**
+ * Asserts that the given function throws an exception of the given type
+ * and with the exception message starting with the prefix string
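+ * e.g. interceptStartsWith[IllegalArgumentException](require(false, "oops"))("requirement failed")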
+ */
+ def interceptStartsWith[T <: Exception](f: => Any)(prefix: String)(implicit
+ classTag: ClassTag[T],
+ prettifier: Prettifier,
+ pos: Position): Unit = {
+ assert(prefix != null)
+ val exception = intercept[T](f)(classTag, pos)
+ val exceptionMessage = exception.getMessage
+ withClue(s"'$exceptionMessage' expected to start with '$prefix'") {
+ assert(exceptionMessage.startsWith(prefix))(prettifier, pos)
+ }
}
}