Commit

[Spark] Avoid unnecessarily calling update and some minor clean up in tests (#3965)
ctring authored Dec 18, 2024
1 parent 1cd6fed commit 34f02d8
Showing 4 changed files with 7 additions and 4 deletions.
@@ -535,7 +535,7 @@ object VacuumCommand extends VacuumCommandImpl with Serializable {
     // The start and the end commit versions give the range of commit files we want to look into
     // to get the list of eligible files for deletion.
     val eligibleStartCommitVersion = math.min(
-      deltaLog.update().version,
+      snapshot.version,
       latestCommitVersionOutsideOfRetentionWindowAsOfLastVacuumOpt
         .map(_ + 1).getOrElse(earliestCommitVersion))
     val eligibleEndCommitVersion = latestCommitVersionOutsideOfRetentionWindow
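
Why this matters: DeltaLog.update() re-resolves the latest table state just to read a version number, while the commit instead reads the version from the Snapshot the vacuum path already holds. A minimal sketch of the pattern, using illustrative names only (UpdateVsSnapshotSketch and pickStartVersion are hypothetical, not part of VacuumCommand):

import org.apache.spark.sql.delta.{DeltaLog, Snapshot}

// Sketch only: contrasts the old and new way of obtaining the lower bound
// for the commit version range considered by vacuum.
object UpdateVsSnapshotSketch {
  def pickStartVersion(deltaLog: DeltaLog, snapshot: Snapshot, fallback: Long): Long = {
    // Old: deltaLog.update() goes back to the log and rebuilds an up-to-date
    // Snapshot just to read its version, redundant work when the caller
    // already holds a Snapshot for this run.
    //   val version = deltaLog.update().version
    // New: read the version from the Snapshot that is already in scope.
    val version = snapshot.version
    math.min(version, fallback)
  }
}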
@@ -512,14 +512,14 @@ class DeltaVacuumSuite extends DeltaVacuumSuiteBase with DeltaSQLCommandTest {

   testQuietly("basic case - SQL command on path-based tables with direct 'path'") {
     withEnvironment { (tempDir, _) =>
-      val table = DeltaTableV2(spark, new Path(tempDir))
+      val table = DeltaTableV2(spark, tempDir)
       vacuumSQLTest(table, tableName = s"'$tempDir'")
     }
   }

   testQuietly("basic case - SQL command on path-based table with delta.`path`") {
     withEnvironment { (tempDir, _) =>
-      val table = DeltaTableV2(spark, new Path(tempDir))
+      val table = DeltaTableV2(spark, tempDir)
       vacuumSQLTest(table, tableName = s"delta.`$tempDir`")
     }
   }
@@ -51,7 +51,7 @@ import org.apache.hadoop.fs.{FileStatus, Path}
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.{QueryTest, Row, SparkSession}
 import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.util.{ManualClock, SystemClock}
+import org.apache.spark.util.ManualClock

 class CoordinatedCommitsSuite
   extends QueryTest
@@ -165,6 +165,9 @@ object DeltaTestImplicits {
     def apply(spark: SparkSession, id: TableIdentifier): DeltaTableV2 =
       dt.apply(spark, id, "test")

+    def apply(spark: SparkSession, tableDir: File): DeltaTableV2 =
+      dt.apply(spark, new Path(tableDir.getAbsolutePath))
+
     def apply(spark: SparkSession, tableDir: File, clock: Clock): DeltaTableV2 = {
       val tablePath = new Path(tableDir.getAbsolutePath)
       new DeltaTableV2(spark, tablePath) {
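
Usage note: the new overload lets a test pass the table directory as a java.io.File directly, as the DeltaVacuumSuite hunk above does. A hedged sketch of a call site; the import path for DeltaTestImplicits, the local SparkSession, and the directory are assumptions for illustration:

import java.io.File
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.delta.catalog.DeltaTableV2
import org.apache.spark.sql.delta.test.DeltaTestImplicits._  // assumed package for the test-only implicits

object VacuumOverloadExample extends App {
  val spark: SparkSession = SparkSession.builder().master("local[1]").getOrCreate()
  val tempDir: File = new File("/tmp/delta-vacuum-test")  // placeholder directory for illustration

  // Before: call sites wrapped the directory themselves, e.g.
  //   DeltaTableV2(spark, new Path(tempDir.getAbsolutePath))
  // After: the File goes straight in and the new overload builds the Path.
  val table: DeltaTableV2 = DeltaTableV2(spark, tempDir)

  spark.stop()
}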
