
Commit a6b47f1

update
bowenliang123 committed Oct 19, 2023
1 parent c4cdf18 commit a6b47f1
Showing 60 changed files with 453 additions and 469 deletions.
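The pattern is the same in every touched suite: an `intercept[...]` call paired with a separate assertion on `getMessage` collapses into a single `interceptEquals` (exact message) or `interceptContains` (substring) call imported via `org.apache.kyuubi.util.AssertionUtils._`, which is why each module's pom gains the `kyuubi-util-scala` test-jar. A minimal sketch of what such helpers plausibly look like, assuming they wrap ScalaTest's `intercept` (the real `AssertionUtils` may differ in signatures and extras):

```scala
import scala.reflect.ClassTag

import org.scalatest.Assertions._

// Sketch only: assumed shape of the AssertionUtils helpers, not Kyuubi's source.
object AssertionUtilsSketch {

  // Fails unless `f` throws a T whose message equals `expected` exactly.
  def interceptEquals[T <: Exception : ClassTag](f: => Any)(expected: String): Unit = {
    val e = intercept[T](f)
    assertResult(expected)(e.getMessage)
  }

  // Fails unless `f` throws a T whose message contains the substring `fragment`.
  def interceptContains[T <: Exception : ClassTag](f: => Any)(fragment: String): Unit = {
    val e = intercept[T](f)
    assert(e.getMessage.contains(fragment))
  }
}
```

The curried second parameter list is what lets each call site below pass the expected message directly after the block, e.g. `interceptEquals[KyuubiSQLExtensionException] { sql(...) }("...")`, collapsing the old intercept-then-assert pair into one expression.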
8 changes: 8 additions & 0 deletions extensions/spark/kyuubi-extension-spark-3-1/pom.xml
@@ -137,6 +137,14 @@
             <artifactId>log4j-slf4j-impl</artifactId>
             <scope>test</scope>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>
8 changes: 8 additions & 0 deletions extensions/spark/kyuubi-extension-spark-3-2/pom.xml
@@ -137,6 +137,14 @@
             <artifactId>log4j-slf4j-impl</artifactId>
             <scope>test</scope>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>
@@ -31,6 +31,7 @@ import org.apache.spark.sql.types._
 
 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
 import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, OptimizeZorderStatement, Zorder, ZorderBytesUtils}
+import org.apache.kyuubi.util.AssertionUtils._
 
 trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
   override def sparkConf(): SparkConf = {
@@ -69,10 +70,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
         "(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
         "(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
 
-      val e = intercept[KyuubiSQLExtensionException] {
+      interceptEquals[KyuubiSQLExtensionException] {
         sql("OPTIMIZE up WHERE c1 > 1 ZORDER BY c1, c2")
-      }
-      assert(e.getMessage == "Filters are only supported for partitioned table")
+      }("Filters are only supported for partitioned table")
 
       sql("OPTIMIZE up ZORDER BY c1, c2")
       val res = sql("SELECT c1, c2 FROM up").collect()
@@ -201,9 +201,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
         "(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
         "(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
 
-      val e = intercept[KyuubiSQLExtensionException](
-        sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2"))
-      assert(e.getMessage == "Only partition column filters are allowed")
+      interceptEquals[KyuubiSQLExtensionException] {
+        sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2")
+      }("Only partition column filters are allowed")
 
       sql(s"OPTIMIZE p WHERE id = 1 ZORDER BY c1, c2")
 
@@ -232,10 +232,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
     // TODO remove this if we support datasource table
     withTable("t") {
       sql("CREATE TABLE t (c1 int, c2 int) USING PARQUET")
-      val msg = intercept[KyuubiSQLExtensionException] {
+      interceptContains[KyuubiSQLExtensionException] {
         sql("OPTIMIZE t ZORDER BY c1, c2")
-      }.getMessage
-      assert(msg.contains("only support hive table"))
+      }("only support hive table")
     }
   }
 
@@ -735,15 +734,13 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
   test("OPTIMIZE partition predicates constraint") {
     withTable("p") {
       sql("CREATE TABLE p (c1 INT, c2 INT) PARTITIONED BY (event_date DATE)")
-      val e1 = intercept[KyuubiSQLExtensionException] {
+      interceptContains[KyuubiSQLExtensionException] {
         sql("OPTIMIZE p WHERE event_date = current_date as c ZORDER BY c1, c2")
-      }
-      assert(e1.getMessage.contains("unsupported partition predicates"))
+      }("unsupported partition predicates")
 
-      val e2 = intercept[KyuubiSQLExtensionException] {
+      interceptEquals[KyuubiSQLExtensionException] {
         sql("OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2")
-      }
-      assert(e2.getMessage == "Only partition column filters are allowed")
+      }("Only partition column filters are allowed")
     }
   }
 
@@ -31,6 +31,7 @@ import org.apache.spark.sql.types._
 
 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
 import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, OptimizeZorderStatement, Zorder, ZorderBytesUtils}
+import org.apache.kyuubi.util.AssertionUtils._
 
 trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
   override def sparkConf(): SparkConf = {
@@ -69,10 +70,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
         "(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
         "(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
 
-      val e = intercept[KyuubiSQLExtensionException] {
+      interceptEquals[KyuubiSQLExtensionException] {
         sql("OPTIMIZE up WHERE c1 > 1 ZORDER BY c1, c2")
-      }
-      assert(e.getMessage == "Filters are only supported for partitioned table")
+      }("Filters are only supported for partitioned table")
 
       sql("OPTIMIZE up ZORDER BY c1, c2")
       val res = sql("SELECT c1, c2 FROM up").collect()
@@ -201,9 +201,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
         "(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
         "(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
 
-      val e = intercept[KyuubiSQLExtensionException](
-        sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2"))
-      assert(e.getMessage == "Only partition column filters are allowed")
+      interceptEquals[KyuubiSQLExtensionException] {
+        sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2")
+      }("Only partition column filters are allowed")
 
       sql(s"OPTIMIZE p WHERE id = 1 ZORDER BY c1, c2")
 
@@ -232,10 +232,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
     // TODO remove this if we support datasource table
     withTable("t") {
       sql("CREATE TABLE t (c1 int, c2 int) USING PARQUET")
-      val msg = intercept[KyuubiSQLExtensionException] {
+      interceptContains[KyuubiSQLExtensionException] {
         sql("OPTIMIZE t ZORDER BY c1, c2")
-      }.getMessage
-      assert(msg.contains("only support hive table"))
+      }("only support hive table")
     }
   }
 
@@ -735,15 +734,13 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
   test("OPTIMIZE partition predicates constraint") {
     withTable("p") {
       sql("CREATE TABLE p (c1 INT, c2 INT) PARTITIONED BY (event_date DATE)")
-      val e1 = intercept[KyuubiSQLExtensionException] {
+      interceptContains[KyuubiSQLExtensionException] {
         sql("OPTIMIZE p WHERE event_date = current_date as c ZORDER BY c1, c2")
-      }
-      assert(e1.getMessage.contains("unsupported partition predicates"))
+      }("unsupported partition predicates")
 
-      val e2 = intercept[KyuubiSQLExtensionException] {
+      interceptEquals[KyuubiSQLExtensionException] {
         sql("OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2")
-      }
-      assert(e2.getMessage == "Only partition column filters are allowed")
+      }("Only partition column filters are allowed")
     }
   }
 
8 changes: 8 additions & 0 deletions extensions/spark/kyuubi-extension-spark-common/pom.xml
@@ -122,6 +122,14 @@
             <artifactId>log4j-slf4j-impl</artifactId>
             <scope>test</scope>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>
@@ -32,6 +32,7 @@ import org.apache.spark.sql.types._
 
 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
 import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, OptimizeZorderStatement, Zorder, ZorderBytesUtils}
+import org.apache.kyuubi.util.AssertionUtils._
 
 trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
   override def sparkConf(): SparkConf = {
@@ -70,10 +71,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
         "(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
         "(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
 
-      val e = intercept[KyuubiSQLExtensionException] {
+      interceptEquals[KyuubiSQLExtensionException] {
         sql("OPTIMIZE up WHERE c1 > 1 ZORDER BY c1, c2")
-      }
-      assert(e.getMessage == "Filters are only supported for partitioned table")
+      }("Filters are only supported for partitioned table")
 
       sql("OPTIMIZE up ZORDER BY c1, c2")
       val res = sql("SELECT c1, c2 FROM up").collect()
@@ -202,9 +202,9 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
         "(2,0,2),(2,1,1),(2,2,5),(2,3,5)," +
         "(3,0,3),(3,1,4),(3,2,9),(3,3,0)")
 
-      val e = intercept[KyuubiSQLExtensionException](
-        sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2"))
-      assert(e.getMessage == "Only partition column filters are allowed")
+      interceptEquals[KyuubiSQLExtensionException] {
+        sql(s"OPTIMIZE p WHERE id = 1 AND c1 > 1 ZORDER BY c1, c2")
+      }("Only partition column filters are allowed")
 
       sql(s"OPTIMIZE p WHERE id = 1 ZORDER BY c1, c2")
 
@@ -233,10 +233,8 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
     // TODO remove this if we support datasource table
     withTable("t") {
       sql("CREATE TABLE t (c1 int, c2 int) USING PARQUET")
-      val msg = intercept[KyuubiSQLExtensionException] {
-        sql("OPTIMIZE t ZORDER BY c1, c2")
-      }.getMessage
-      assert(msg.contains("only support hive table"))
+      interceptContains[KyuubiSQLExtensionException] { sql("OPTIMIZE t ZORDER BY c1, c2") }(
+        "only support hive table")
     }
   }
 
@@ -736,15 +734,13 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
   test("OPTIMIZE partition predicates constraint") {
     withTable("p") {
       sql("CREATE TABLE p (c1 INT, c2 INT) PARTITIONED BY (event_date DATE)")
-      val e1 = intercept[KyuubiSQLExtensionException] {
+      interceptContains[KyuubiSQLExtensionException] {
         sql("OPTIMIZE p WHERE event_date = current_date as c ZORDER BY c1, c2")
-      }
-      assert(e1.getMessage.contains("unsupported partition predicates"))
+      }("unsupported partition predicates")
 
-      val e2 = intercept[KyuubiSQLExtensionException] {
+      interceptEquals[KyuubiSQLExtensionException] {
         sql("OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2")
-      }
-      assert(e2.getMessage == "Only partition column filters are allowed")
+      }("Only partition column filters are allowed")
     }
   }
 
@@ -28,6 +28,7 @@ import org.scalatest.funsuite.AnyFunSuite
 import org.apache.kyuubi.config.KyuubiReservedKeys.{KYUUBI_SESSION_SIGN_PUBLICKEY, KYUUBI_SESSION_USER_KEY, KYUUBI_SESSION_USER_SIGN}
 import org.apache.kyuubi.plugin.spark.authz.{AccessControlException, SparkSessionProvider}
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils
+import org.apache.kyuubi.util.AssertionUtils._
 import org.apache.kyuubi.util.SignUtils
 
 class AuthzSessionSigningSuite extends AnyFunSuite
@@ -62,13 +63,13 @@ class AuthzSessionSigningSuite extends AnyFunSuite
     // fake session user name
     val fakeSessionUser = "faker"
     sc.setLocalProperty(KYUUBI_SESSION_USER_KEY, fakeSessionUser)
-    val e1 = intercept[AccessControlException](AuthZUtils.getAuthzUgi(sc))
-    assertResult(s"Invalid user identifier [$fakeSessionUser]")(e1.getMessage)
+    interceptEquals[AccessControlException](AuthZUtils.getAuthzUgi(sc))(
+      s"Invalid user identifier [$fakeSessionUser]")
     sc.setLocalProperty(KYUUBI_SESSION_USER_KEY, kyuubiSessionUser)
 
     // invalid session user sign
     sc.setLocalProperty(KYUUBI_SESSION_USER_SIGN, "invalid_sign")
-    val e2 = intercept[AccessControlException](AuthZUtils.getAuthzUgi(sc))
-    assertResult(s"Invalid user identifier [$kyuubiSessionUser]")(e2.getMessage)
+    interceptEquals[AccessControlException](AuthZUtils.getAuthzUgi(sc))(
+      s"Invalid user identifier [$kyuubiSessionUser]")
   }
 }
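The suites use two call shapes for the same helpers: braces around a multi-statement body (the Zorder suites) and parentheses around a single expression (the signing suite above). A hypothetical, self-contained mini-suite exercising both shapes against the earlier sketch; `AssertionUtilsSketch`, `InterceptHelpersExampleSuite`, and `failWith` are illustrative names, not Kyuubi APIs:

```scala
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical example suite; relies on the AssertionUtilsSketch object above.
class InterceptHelpersExampleSuite extends AnyFunSuite {
  import AssertionUtilsSketch._

  // Helper that always throws, standing in for sql(...) or getAuthzUgi(...).
  private def failWith(msg: String): Nothing = throw new IllegalStateException(msg)

  test("exact-message shape, single expression") {
    interceptEquals[IllegalStateException](failWith("boom"))("boom")
  }

  test("substring shape, multi-statement body") {
    interceptContains[IllegalStateException] {
      val table = "t"
      failWith(s"table $table: only support hive table")
    }("only support hive table")
  }
}
```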
@@ -25,7 +25,7 @@ import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
 import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 import org.apache.kyuubi.tags.HudiTest
-import org.apache.kyuubi.util.AssertionUtils.interceptContains
+import org.apache.kyuubi.util.AssertionUtils._
 
 /**
  * Tests for RangerSparkExtensionSuite on Hudi SQL.
@@ -103,12 +103,9 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
       """.stripMargin
 
     // MergeIntoTable: Using a MERGE INTO Statement
-    val e1 = intercept[AccessControlException](
-      doAs(
-        someone,
-        sql(mergeIntoSql)))
-    assert(e1.getMessage.contains(s"does not have [select] privilege" +
-      s" on [$namespace1/$table1/id]"))
+    interceptContains[AccessControlException] {
+      doAs(someone, sql(mergeIntoSql))
+    }(s"does not have [select] privilege on [$namespace1/$table1/id]")
 
     withSingleCallEnabled {
       interceptContains[AccessControlException](doAs(someone, sql(mergeIntoSql)))(
@@ -181,8 +178,8 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
 
     doAs(
       createOnlyUser, {
-        val e = intercept[AccessControlException](sql(select).collect())
-        assert(e.getMessage === errorMessage("select", s"$namespace1/$table/key"))
+        interceptEquals[AccessControlException](sql(select).collect())(
+          errorMessage("select", s"$namespace1/$table/key"))
       })
   }
 }
@@ -240,10 +237,9 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   }
 
   test("[KYUUBI #4255] DESCRIBE TABLE") {
-    val e1 = intercept[AccessControlException](
-      doAs(someone, sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain()))
-    assert(e1.getMessage.contains(s"does not have [select] privilege" +
-      s" on [$namespace1/$table1]"))
+    interceptContains[AccessControlException] {
+      doAs(someone, sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain())
+    }(s"does not have [select] privilege on [$namespace1/$table1]")
   }
 
   test("CALL RewriteDataFilesProcedure") {