Commit d6f713e
[SPARK-53844][TESTS] Remove SPARK_JENKINS* and related logics from dev/run-tests.py
### What changes were proposed in this pull request?

This PR aims to remove the unused `SPARK_JENKINS` and `SPARK_JENKINS_PRB` environment variables and related logic from `dev/run-tests.py` in Apache Spark 4.1.0.

### Why are the changes needed?

We are moving away from the Jenkins environment:

- Apache Spark 4.0.0: #48713
- Apache Spark 3.4.0: #39539
- Apache Spark 3.3.0: #35025

### Does this PR introduce _any_ user-facing change?

No behavior change.

### How was this patch tested?

Pass the CIs and manual review, because this is unused code.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #52548 from dongjoon-hyun/SPARK-53844.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
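For orientation, the configuration logic that remains in `main()` after this commit reduces to the following shape — a condensed sketch assembled from the `+` lines of the diff below, not a verbatim excerpt of the file:

```python
import os

# The build tool is now unconditionally sbt; build profiles come from the
# plain SCALA_PROFILE / HADOOP_PROFILE variables instead of the removed
# SPARK_JENKINS_BUILD_* ones.
build_tool = "sbt"
scala_version = os.environ.get("SCALA_PROFILE")
hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop3")

# GitHub Actions is the only CI environment still detected; anything else
# is treated as a local run.
if "GITHUB_ACTIONS" in os.environ:
    test_env = "github_actions"
else:
    test_env = "local"
```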
1 parent bf2457b commit d6f713e

1 file changed (+9, -33 lines)

dev/run-tests.py

Lines changed: 9 additions & 33 deletions
```diff
@@ -29,7 +29,6 @@
 from sparktestsupport.utils import (
     determine_modules_for_files,
     determine_modules_to_test,
-    determine_tags_to_exclude,
     identify_changed_files_from_git_commits,
 )
 import sparktestsupport.modules as modules
```
```diff
@@ -287,7 +286,7 @@ def build_spark_assembly_sbt(extra_profiles, checkstyle=False):
     if checkstyle:
         run_java_style_checks(build_profiles)
 
-    if not os.environ.get("SPARK_JENKINS") and not os.environ.get("SKIP_UNIDOC"):
+    if not os.environ.get("SKIP_UNIDOC"):
         build_spark_unidoc_sbt(extra_profiles)
 
 
```
```diff
@@ -395,7 +394,7 @@ def run_python_tests(test_modules, test_pythons, parallelism, with_coverage=Fals
 
 
 def run_python_packaging_tests():
-    if not os.environ.get("SPARK_JENKINS") and os.environ.get("SKIP_PACKAGING", "false") != "true":
+    if os.environ.get("SKIP_PACKAGING", "false") != "true":
         set_title_and_block("Running PySpark packaging tests", "BLOCK_PYSPARK_PIP_TESTS")
         command = [os.path.join(SPARK_HOME, "dev", "run-pip-tests")]
         run_cmd(command)
```
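A side note on the two skip knobs that survive this cleanup: they are checked differently. `SKIP_UNIDOC` is tested for truthiness, so any non-empty value disables the unidoc build, whereas `SKIP_PACKAGING` skips the pip packaging tests only when it is exactly the string `true`. A minimal sketch of the two predicates (the helper names here are illustrative, not part of the script):

```python
import os

def unidoc_enabled(environ):
    # Any non-empty SKIP_UNIDOC value ("1", "yes", "true", ...) skips unidoc.
    return not environ.get("SKIP_UNIDOC")

def packaging_tests_enabled(environ):
    # Only the exact string "true" skips the pip packaging tests.
    return environ.get("SKIP_PACKAGING", "false") != "true"

assert not unidoc_enabled({"SKIP_UNIDOC": "1"})
assert unidoc_enabled({})
assert packaging_tests_enabled({})
assert not packaging_tests_enabled({"SKIP_PACKAGING": "true"})
```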
```diff
@@ -507,22 +506,13 @@ def main():
     else:
         print("Cannot install SparkR as R was not found in PATH")
 
-    if os.environ.get("SPARK_JENKINS"):
-        # if we're on the Amplab Jenkins build servers setup variables
-        # to reflect the environment settings
-        build_tool = os.environ.get("SPARK_JENKINS_BUILD_TOOL", "sbt")
-        scala_version = os.environ.get("SPARK_JENKINS_BUILD_SCALA_PROFILE")
-        hadoop_version = os.environ.get("SPARK_JENKINS_BUILD_PROFILE", "hadoop3")
-        test_env = "spark_jenkins"
+    build_tool = "sbt"
+    scala_version = os.environ.get("SCALA_PROFILE")
+    hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop3")
+    if "GITHUB_ACTIONS" in os.environ:
+        test_env = "github_actions"
     else:
-        # else we're running locally or GitHub Actions.
-        build_tool = "sbt"
-        scala_version = os.environ.get("SCALA_PROFILE")
-        hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop3")
-        if "GITHUB_ACTIONS" in os.environ:
-            test_env = "github_actions"
-        else:
-            test_env = "local"
+        test_env = "local"
 
     extra_profiles = get_hadoop_profiles(hadoop_version) + get_scala_profiles(scala_version)
 
```
```diff
@@ -570,15 +560,6 @@ def main():
             print("[info] There are no modules to test, exiting without testing.")
             return
 
-    # If we're running the tests in Jenkins, calculate the diff from the targeted branch, and
-    # detect modules to test.
-    elif os.environ.get("SPARK_JENKINS_PRB"):
-        target_branch = os.environ["ghprbTargetBranch"]
-        changed_files = identify_changed_files_from_git_commits("HEAD", target_branch=target_branch)
-        changed_modules = determine_modules_for_files(changed_files)
-        test_modules = determine_modules_to_test(changed_modules)
-        excluded_tags = determine_tags_to_exclude(changed_modules)
-
     # If there is no changed module found, tests all.
     if not changed_modules:
         changed_modules = [modules.root]
```
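With this hunk gone, `determine_tags_to_exclude` has no remaining caller in the script, matching the import removal in the first hunk above.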
```diff
@@ -633,12 +614,7 @@ def main():
     ):
         run_sparkr_style_checks()
 
-    # determine if docs were changed and if we're inside the jenkins environment
-    # note - the below commented out until *all* Jenkins workers can get the Bundler gem installed
-    # if "DOCS" in changed_modules and test_env == "spark_jenkins":
-    #     build_spark_documentation()
-
-    if any(m.should_run_build_tests for m in test_modules) and test_env != "spark_jenkins":
+    if any(m.should_run_build_tests for m in test_modules):
         run_build_tests()
 
     # spark build
```
