Skip to content

[SPARK-46941][SQL][3.5] Can't insert window group limit node for top-k computation if contains SizeBasedWindowFunction #51422

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

package org.apache.spark.sql.catalyst.optimizer

import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, CurrentRow, DenseRank, EqualTo, Expression, GreaterThan, GreaterThanOrEqual, IntegerLiteral, LessThan, LessThanOrEqual, Literal, NamedExpression, PredicateHelper, Rank, RowFrame, RowNumber, SpecifiedWindowFrame, UnboundedPreceding, WindowExpression, WindowSpecDefinition}
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, CurrentRow, DenseRank, EqualTo, Expression, GreaterThan, GreaterThanOrEqual, IntegerLiteral, LessThan, LessThanOrEqual, Literal, NamedExpression, PredicateHelper, Rank, RowFrame, RowNumber, SizeBasedWindowFunction, SpecifiedWindowFrame, UnboundedPreceding, WindowExpression, WindowSpecDefinition}
import org.apache.spark.sql.catalyst.plans.logical.{Filter, Limit, LocalRelation, LogicalPlan, Window, WindowGroupLimit}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.trees.TreePattern.{FILTER, WINDOW}
Expand Down Expand Up @@ -53,13 +53,14 @@ object InferWindowGroupLimit extends Rule[LogicalPlan] with PredicateHelper {
}

/**
 * All window expressions should use the same expanding window (a
 * `ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW` frame) and must not contain a
 * `SizeBasedWindowFunction`, so that we can safely do the early stop.
 *
 * A `SizeBasedWindowFunction` (e.g. `PercentRank`, `CumeDist`, `NTile`) needs the total
 * partition size to compute its value, so early-stopping the partition scan with a
 * `WindowGroupLimit` would change its result.
 *
 * @param windowExpression the named window expression to inspect
 * @return true only for an aliased expanding-frame window whose function does not
 *         depend on the partition size
 */
private def isExpandingWindow(
    windowExpression: NamedExpression): Boolean = windowExpression match {
  // The guard is essential: without it the expanding-frame pattern alone would accept
  // size-based functions and an inserted WindowGroupLimit would produce wrong answers.
  case Alias(WindowExpression(windowFunction, WindowSpecDefinition(_, _,
      SpecifiedWindowFrame(RowFrame, UnboundedPreceding, CurrentRow))), _)
    if !windowFunction.isInstanceOf[SizeBasedWindowFunction] => true
  case _ => false
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.{CurrentRow, DenseRank, Literal, NthValue, NTile, Rank, RowFrame, RowNumber, SpecifiedWindowFrame, UnboundedPreceding}
import org.apache.spark.sql.catalyst.expressions.{CurrentRow, DenseRank, Literal, NthValue, NTile, PercentRank, Rank, RowFrame, RowNumber, SpecifiedWindowFrame, UnboundedPreceding}
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
Expand Down Expand Up @@ -338,4 +338,20 @@ class InferWindowGroupLimitSuite extends PlanTest {
WithoutOptimize.execute(correctAnswer1.analyze))
}
}

test("SPARK-46941: Can't Insert window group limit node for top-k computation if contains " +
  "SizeBasedWindowFunction") {
  // PercentRank is a SizeBasedWindowFunction: it needs the full partition size, so the
  // optimizer must NOT insert a WindowGroupLimit here. The optimized plan is therefore
  // expected to be identical to the un-optimized one.
  val spec = windowSpec(a :: Nil, c.desc :: Nil, windowFrame)
  val query = testRelation
    .select(
      a, b, c,
      windowExpr(Rank(c :: Nil), spec).as("rank"),
      windowExpr(PercentRank(c :: Nil), spec).as("percent_rank"))
    .where(Symbol("rank") < 2)

  comparePlans(
    Optimize.execute(query.analyze),
    WithoutOptimize.execute(query.analyze))
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1637,4 +1637,31 @@ class DataFrameWindowFunctionsSuite extends QueryTest
}
}
}

test("SPARK-46941: Can't insert window group limit node for top-k computation if contains " +
  "SizeBasedWindowFunction") {
  // percent_rank is a SizeBasedWindowFunction; the WindowGroupLimit rewrite must be
  // skipped for it, and the answer must be the same whether the rewrite is enabled
  // (threshold 100) or disabled (threshold -1).
  val input = Seq(
    (1, "Dave", 1, 2020),
    (2, "Mark", 2, 2020),
    (3, "Amy", 3, 2020),
    (4, "Dave", 1, 2021),
    (5, "Mark", 2, 2021),
    (6, "Amy", 3, 2021),
    (7, "John", 4, 2021)).toDF("id", "name", "score", "year")

  val byYearScoreDesc = Window.partitionBy($"year").orderBy($"score".desc)
  val expected = Seq(
    Row(2, "Mark", 2, 2020, 2, 0.5),
    Row(6, "Amy", 3, 2021, 2, 0.3333333333333333))

  Seq(-1, 100).foreach { threshold =>
    withSQLConf(SQLConf.WINDOW_GROUP_LIMIT_THRESHOLD.key -> threshold.toString) {
      val ranked = input
        .withColumn("rank", rank().over(byYearScoreDesc))
        .withColumn("percent_rank", percent_rank().over(byYearScoreDesc))
        .sort($"year")
      checkAnswer(ranked.filter("rank=2"), expected)
    }
  }
}
}