Commit 0b5b0d5

[SPARK-51293][CORE][SQL][SS][MLLIB][TESTS] Cleanup unused private functions from test suites
### What changes were proposed in this pull request?

This PR aims to clean up unused private functions from test suites.

### Why are the changes needed?

Code cleanup.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass GitHub Actions.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#50049 from LuciferYang/SPARK-51293.

Lead-authored-by: yangjie01 <[email protected]>
Co-authored-by: YangJie <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
1 parent 3027968 commit 0b5b0d5
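None of the removals below change test behaviour; each one deletes a private helper that no longer has a caller. As an illustrative aside (this commit touches no build settings, and the snippet below is not from Spark's build), dead private members of this kind can be surfaced at compile time by enabling the Scala 2.13 unused-member warnings in an sbt build:

// build.sbt -- hypothetical sketch, not part of this commit.
// Warn about private members, local definitions and imports that are never used,
// and optionally promote the warnings to errors so CI rejects new dead code.
scalacOptions ++= Seq(
  "-Wunused:privates",
  "-Wunused:locals",
  "-Wunused:imports",
  "-Xfatal-warnings"
)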

File tree: 17 files changed (+2, -166 lines)


connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala

-2
@@ -806,8 +806,6 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
 
   protected def caseConvert(tableName: String): String = tableName
 
-  private def withOrWithout(isDistinct: Boolean): String = if (isDistinct) "with" else "without"
-
   Seq(true, false).foreach { isDistinct =>
     val distinct = if (isDistinct) "DISTINCT " else ""
     val withOrWithout = if (isDistinct) "with" else "without"

core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala

-15
@@ -1924,8 +1924,6 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
     PrivateMethod[mutable.HashMap[Int, Int]](Symbol("numLocalityAwareTasksPerResourceProfileId"))
   private val _rpIdToHostToLocalTaskCount =
     PrivateMethod[Map[Int, Map[String, Int]]](Symbol("rpIdToHostToLocalTaskCount"))
-  private val _onSpeculativeTaskSubmitted =
-    PrivateMethod[Unit](Symbol("onSpeculativeTaskSubmitted"))
   private val _totalRunningTasksPerResourceProfile =
     PrivateMethod[Int](Symbol("totalRunningTasksPerResourceProfile"))
 

@@ -1942,12 +1940,6 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
     nmap(rp.id)
   }
 
-  private def updateAndSyncNumExecutorsTarget(
-      manager: ExecutorAllocationManager,
-      now: Long): Unit = {
-    manager invokePrivate _updateAndSyncNumExecutorsTarget(now)
-  }
-
   private def numExecutorsTargetForDefaultProfileId(manager: ExecutorAllocationManager): Int = {
     numExecutorsTarget(manager, defaultProfile.id)
   }
@@ -2025,10 +2017,6 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
     manager invokePrivate _onSchedulerQueueEmpty()
   }
 
-  private def onSpeculativeTaskSubmitted(manager: ExecutorAllocationManager, id: String) : Unit = {
-    manager invokePrivate _onSpeculativeTaskSubmitted(id)
-  }
-
   private def localityAwareTasksForDefaultProfile(manager: ExecutorAllocationManager): Int = {
     val localMap = manager invokePrivate _localityAwareTasksPerResourceProfileId()
     localMap(defaultProfile.id)
@@ -2044,7 +2032,4 @@ private object ExecutorAllocationManagerSuite extends PrivateMethodTester {
     rpIdToHostLocal(defaultProfile.id)
   }
 
-  private def getResourceProfileIdOfExecutor(manager: ExecutorAllocationManager): Int = {
-    defaultProfile.id
-  }
 }
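For context on the lines kept above: ExecutorAllocationManagerSuite reaches private members of ExecutorAllocationManager through ScalaTest's PrivateMethodTester, which is what the remaining PrivateMethod/invokePrivate declarations do. A minimal, self-contained sketch of that pattern (Counter and its method are hypothetical examples, not Spark code):

import org.scalatest.PrivateMethodTester

// Hypothetical class whose private method a test wants to exercise.
class Counter {
  private var total = 0
  private def bump(delta: Int): Int = { total += delta; total }
}

object CounterDemo extends PrivateMethodTester {
  // Bind a handle to the private method by name; the type parameter is its result type.
  private val _bump = PrivateMethod[Int](Symbol("bump"))

  def main(args: Array[String]): Unit = {
    val counter = new Counter
    // invokePrivate reflectively calls the private method on the target instance.
    assert((counter invokePrivate _bump(3)) == 3)
    assert((counter invokePrivate _bump(2)) == 5)
  }
}

The removed wrappers above simply delegated to this mechanism and had no remaining callers.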

core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala

-8
@@ -233,14 +233,6 @@ class AppClientSuite
   // | Utility methods for testing |
   // ===============================
 
-  /** Return a SparkConf for applications that want to talk to our Master. */
-  private def appConf: SparkConf = {
-    new SparkConf()
-      .setMaster(masterRpcEnv.address.toSparkURL)
-      .setAppName("test")
-      .set("spark.executor.memory", "256m")
-  }
-
   /** Make a master to which our application will send executor requests. */
   private def makeMaster(): Master = {
     val master = new Master(masterRpcEnv, masterRpcEnv.address, 0, securityManager, conf)

core/src/test/scala/org/apache/spark/storage/StorageSuite.scala

-23
@@ -105,29 +105,6 @@ class StorageSuite extends SparkFunSuite {
     assert(status.diskUsed === actualDiskUsed)
   }
 
-  // For testing StorageUtils.updateRddInfo and StorageUtils.getRddBlockLocations
-  private def stockStorageStatuses: Seq[StorageStatus] = {
-    val status1 = new StorageStatus(BlockManagerId("big", "dog", 1), 1000L, Some(1000L), Some(0L))
-    val status2 = new StorageStatus(BlockManagerId("fat", "duck", 2), 2000L, Some(2000L), Some(0L))
-    val status3 = new StorageStatus(BlockManagerId("fat", "cat", 3), 3000L, Some(3000L), Some(0L))
-    status1.addBlock(RDDBlockId(0, 0), BlockStatus(memAndDisk, 1L, 2L))
-    status1.addBlock(RDDBlockId(0, 1), BlockStatus(memAndDisk, 1L, 2L))
-    status2.addBlock(RDDBlockId(0, 2), BlockStatus(memAndDisk, 1L, 2L))
-    status2.addBlock(RDDBlockId(0, 3), BlockStatus(memAndDisk, 1L, 2L))
-    status2.addBlock(RDDBlockId(1, 0), BlockStatus(memAndDisk, 1L, 2L))
-    status2.addBlock(RDDBlockId(1, 1), BlockStatus(memAndDisk, 1L, 2L))
-    status3.addBlock(RDDBlockId(0, 4), BlockStatus(memAndDisk, 1L, 2L))
-    status3.addBlock(RDDBlockId(1, 2), BlockStatus(memAndDisk, 1L, 2L))
-    Seq(status1, status2, status3)
-  }
-
-  // For testing StorageUtils.updateRddInfo
-  private def stockRDDInfos: Seq[RDDInfo] = {
-    val info0 = new RDDInfo(0, "0", 10, memAndDisk, false, Seq(3))
-    val info1 = new RDDInfo(1, "1", 3, memAndDisk, false, Seq(4))
-    Seq(info0, info1)
-  }
-
   private val offheap = StorageLevel.OFF_HEAP
   // For testing add, update, remove, get, and contains etc. for both RDD and non-RDD onheap
   // and offheap blocks

core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala

-16
@@ -1370,18 +1370,6 @@ private[spark] object JsonProtocolSuite extends Assertions {
     }
   }
 
-  private def assertOptionEquals[T](
-      opt1: Option[T],
-      opt2: Option[T],
-      assertEquals: (T, T) => Unit): Unit = {
-    if (opt1.isDefined) {
-      assert(opt2.isDefined)
-      assertEquals(opt1.get, opt2.get)
-    } else {
-      assert(opt2.isEmpty)
-    }
-  }
-
   /**
    * Use different names for methods we pass in to assertSeqEquals or assertOptionEquals
    */
@@ -1407,10 +1395,6 @@ private[spark] object JsonProtocolSuite extends Assertions {
     assert(ste1.getFileName === ste2.getFileName)
   }
 
-  private def assertEquals(rp1: ResourceProfile, rp2: ResourceProfile): Unit = {
-    assert(rp1 === rp2)
-  }
-
   /** ----------------------------------- *
    | Util methods for constructing events |
    * ------------------------------------ */

mllib/src/test/scala/org/apache/spark/mllib/regression/IsotonicRegressionSuite.scala

-10
@@ -72,16 +72,6 @@ class IsotonicRegressionSuite extends SparkFunSuite with MLlibTestSparkContext w
     runIsotonicRegression(labels, Array.fill(labels.size)(1d).toImmutableArraySeq, isotonic)
   }
 
-  private def runIsotonicRegression(
-      labels: Seq[Double],
-      features: Seq[Double],
-      weights: Seq[Double],
-      isotonic: Boolean): IsotonicRegressionModel = {
-    runIsotonicRegressionOnInput(
-      labels.indices.map(i => (labels(i), features(i), weights(i))),
-      isotonic)
-  }
-
   private def runIsotonicRegressionOnInput(
       input: Seq[(Double, Double, Double)],
       isotonic: Boolean,

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercionSuite.scala

-21
@@ -24,7 +24,6 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.types.DataTypeUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.types.{AbstractArrayType, StringTypeWithCollation}
 import org.apache.spark.sql.types._
@@ -64,26 +63,6 @@ class AnsiTypeCoercionSuite extends TypeCoercionSuiteBase {
 
   override def dateTimeOperationsRule: TypeCoercionRule = AnsiTypeCoercion.DateTimeOperations
 
-  private def shouldCastStringLiteral(to: AbstractDataType, expected: DataType): Unit = {
-    val input = Literal("123")
-    val castResult = AnsiTypeCoercion.implicitCast(input, to)
-    assert(DataTypeUtils.equalsIgnoreCaseAndNullability(
-      castResult.map(_.dataType).orNull, expected),
-      s"Failed to cast String literal to $to")
-  }
-
-  private def shouldNotCastStringLiteral(to: AbstractDataType): Unit = {
-    val input = Literal("123")
-    val castResult = AnsiTypeCoercion.implicitCast(input, to)
-    assert(castResult.isEmpty, s"Should not be able to cast String literal to $to")
-  }
-
-  private def shouldNotCastStringInput(to: AbstractDataType): Unit = {
-    val input = AttributeReference("s", StringType)()
-    val castResult = AnsiTypeCoercion.implicitCast(input, to)
-    assert(castResult.isEmpty, s"Should not be able to cast non-foldable String input to $to")
-  }
-
   private def checkWidenType(
       widenFunc: (DataType, DataType) => Option[DataType],
       t1: DataType,

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala

+1, -9
@@ -20,7 +20,7 @@ import org.apache.spark.SparkThrowable
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{Expression, Literal, NamedArgumentExpression}
 import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
-import org.apache.spark.sql.catalyst.plans.logical.{FunctionBuilderBase, FunctionSignature, InputParameter, NamedParametersSupport}
+import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, InputParameter, NamedParametersSupport}
 import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLId
 import org.apache.spark.sql.types.DataType
 

@@ -89,14 +89,6 @@ class NamedParameterFunctionSuite extends AnalysisTest {
       NamedParametersSupport.defaultRearrange(functionSignature, expressions, functionName))
   }
 
-  private def parseExternalException[T <: FunctionBuilderBase[_]](
-      functionName: String,
-      builder: T,
-      expressions: Seq[Expression]) : SparkThrowable = {
-    intercept[SparkThrowable](
-      FunctionRegistry.rearrangeExpressions[T](functionName, builder, expressions))
-  }
-
   test("DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT") {
     val condition =
       "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.BOTH_POSITIONAL_AND_NAMED"

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala

-3
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.catalyst.encoders
 
-import scala.reflect.ClassTag
-
 import org.apache.spark.{SPARK_DOC_ROOT, SparkFunSuite, SparkUnsupportedOperationException}
 import org.apache.spark.sql.Encoders
 
@@ -98,5 +96,4 @@ class EncoderErrorMessageSuite extends SparkFunSuite {
     )
   }
 
-  private def clsName[T : ClassTag]: String = implicitly[ClassTag[T]].runtimeClass.getName
 }

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala

-4
@@ -303,10 +303,6 @@ class CastWithAnsiOnSuite extends CastSuiteBase with QueryErrorsBase {
       s"cannot be cast to ${toSQLType(to)} because it is malformed."
   }
 
-  private def castErrMsg(l: Literal, to: DataType): String = {
-    castErrMsg(l, to, l.dataType)
-  }
-
   test("cast from invalid string to numeric should throw NumberFormatException") {
     def check(value: String, dataType: DataType): Unit = {
       checkExceptionInExpression[NumberFormatException](cast(value, dataType),

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala

-12
@@ -168,18 +168,6 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
     assert(safeStringToInterval(UTF8String.fromString(input)) === null)
   }
 
-  private def checkFromInvalidStringUnknownError(input: String, word: String): Unit = {
-    checkError(
-      exception = intercept[SparkIllegalArgumentException] {
-        stringToInterval(UTF8String.fromString(input))
-      },
-      condition = "INVALID_INTERVAL_FORMAT.UNKNOWN_PARSING_ERROR",
-      parameters = Map(
-        "input" -> Option(input).map(_.toString).getOrElse("null"),
-        "word" -> word))
-    assert(safeStringToInterval(UTF8String.fromString(input)) === null)
-  }
-
   private def failFuncWithInvalidInput(
       input: String, errorMsg: String, converter: String => CalendarInterval): Unit = {
     withClue("Expected to throw an exception for the invalid input") {

sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala

-10
@@ -1388,16 +1388,6 @@ class DataSourceV2SQLSuiteV1Filter
     }
   }
 
-  private def testShowNamespaces(
-      sqlText: String,
-      expected: Seq[String]): Unit = {
-    val schema = new StructType().add("namespace", StringType, nullable = false)
-
-    val df = spark.sql(sqlText)
-    assert(df.schema === schema)
-    assert(df.collect().map(_.getAs[String](0)).sorted === expected.sorted)
-  }
-
   test("Use: basic tests with USE statements") {
     val catalogManager = spark.sessionState.catalogManager
 

sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala

-7
@@ -453,13 +453,6 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
     }
   }
 
-  private def assertRelationNotFound(query: String, relation: String): Unit = {
-    val e = intercept[AnalysisException] {
-      sql(query)
-    }
-    checkErrorTableNotFound(e, relation)
-  }
-
   private def assertRelationNotFound(query: String, relation: String, context: ExpectedContext):
     Unit = {
     val e = intercept[AnalysisException] {

sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/StateStoreBasicOperationsBenchmark.scala

-4
@@ -353,10 +353,6 @@ object StateStoreBasicOperationsBenchmark extends SqlBasedBenchmark {
     }
   }
 
-  private def getRows(store: StateStore, keys: Seq[UnsafeRow]): Seq[UnsafeRow] = {
-    keys.map(key => store.get(key))
-  }
-
   private def loadInitialData(
       provider: StateStoreProvider,
       data: Seq[(UnsafeRow, UnsafeRow)]): Long = {

sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala

-8
@@ -235,14 +235,6 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
 
   override val streamingTimeout = 80.seconds
 
-  /** Use `format` and `path` to create FileStreamSource via DataFrameReader */
-  private def createFileStreamSource(
-      format: String,
-      path: String,
-      schema: Option[StructType] = None): FileStreamSource = {
-    getSourceFromFileStream(createFileStream(format, path, schema))
-  }
-
   private def createFileStreamSourceAndGetSchema(
       format: Option[String],
       path: Option[String],

sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala

-2
@@ -482,8 +482,6 @@ class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter {
       meq(Map.empty))
   }
 
-  private def newTextInput = Utils.createTempDir(namePrefix = "text").getCanonicalPath
-
   test("check foreach() catches null writers") {
     val df = spark.readStream
       .format("org.apache.spark.sql.streaming.test")

sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/HiveOrcQuerySuite.scala

+1, -12
@@ -26,7 +26,7 @@ import org.apache.orc.OrcConf
 import org.apache.spark.sql.{AnalysisException, Row}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.HiveTableRelation
-import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation, LogicalRelationWithTable}
+import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
 import org.apache.spark.sql.execution.datasources.orc.OrcQueryTest
 import org.apache.spark.sql.hive.{HiveSessionCatalog, HiveUtils}
 import org.apache.spark.sql.hive.test.TestHiveSingleton
@@ -231,17 +231,6 @@ class HiveOrcQuerySuite extends OrcQueryTest with TestHiveSingleton {
       .getCachedDataSourceTable(table)
   }
 
-  private def checkCached(tableIdentifier: TableIdentifier): Unit = {
-    getCachedDataSourceTable(tableIdentifier) match {
-      case null => fail(s"Converted ${tableIdentifier.table} should be cached in the cache.")
-      case LogicalRelationWithTable(_: HadoopFsRelation, _) => // OK
-      case other =>
-        fail(
-          s"The cached ${tableIdentifier.table} should be a HadoopFsRelation. " +
-          s"However, $other is returned form the cache.")
-    }
-  }
-
   test("SPARK-28573 ORC conversation could be applied for partitioned table insertion") {
     withTempView("single") {
       val singleRowDF = Seq((0, "foo")).toDF("key", "value")
