29 | 29 | from sparktestsupport.utils import ( |
30 | 30 | determine_modules_for_files, |
31 | 31 | determine_modules_to_test, |
32 | | - determine_tags_to_exclude, |
33 | 32 | identify_changed_files_from_git_commits, |
34 | 33 | ) |
35 | 34 | import sparktestsupport.modules as modules |
@@ -287,7 +286,7 @@ def build_spark_assembly_sbt(extra_profiles, checkstyle=False): |
287 | 286 | if checkstyle: |
288 | 287 | run_java_style_checks(build_profiles) |
289 | 288 |
290 | | - if not os.environ.get("SPARK_JENKINS") and not os.environ.get("SKIP_UNIDOC"): |
| 289 | + if not os.environ.get("SKIP_UNIDOC"): |
291 | 290 | build_spark_unidoc_sbt(extra_profiles) |
292 | 291 |
293 | 292 |
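Reviewer note: with the SPARK_JENKINS guard gone, the unidoc build is gated only on SKIP_UNIDOC, and because `os.environ.get()` returns a string, any non-empty value (even `"false"`) skips the build. A minimal standalone sketch of that behavior (a plain dict stands in for `os.environ`):

```python
def should_build_unidoc(env):
    # Mirrors the new gate in build_spark_assembly_sbt: any non-empty
    # SKIP_UNIDOC value disables the unidoc build, because the check is on
    # truthiness of the string, not on its value.
    return not env.get("SKIP_UNIDOC")

assert should_build_unidoc({}) is True
assert should_build_unidoc({"SKIP_UNIDOC": "1"}) is False
assert should_build_unidoc({"SKIP_UNIDOC": "false"}) is False  # even "false" skips
```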
@@ -395,7 +394,7 @@ def run_python_tests(test_modules, test_pythons, parallelism, with_coverage=Fals |
395 | 394 |
396 | 395 |
397 | 396 | def run_python_packaging_tests(): |
398 | | - if not os.environ.get("SPARK_JENKINS") and os.environ.get("SKIP_PACKAGING", "false") != "true": |
| 397 | + if os.environ.get("SKIP_PACKAGING", "false") != "true": |
399 | 398 | set_title_and_block("Running PySpark packaging tests", "BLOCK_PYSPARK_PIP_TESTS") |
400 | 399 | command = [os.path.join(SPARK_HOME, "dev", "run-pip-tests")] |
401 | 400 | run_cmd(command) |
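By contrast, the packaging-test gate compares SKIP_PACKAGING against the literal string `"true"`, so only that exact value disables the pip tests. A small sketch of the simplified check (again with a dict in place of `os.environ`):

```python
def should_run_packaging_tests(env):
    # Mirrors the new check in run_python_packaging_tests: packaging tests run
    # unless SKIP_PACKAGING is exactly "true"; the default is "false".
    return env.get("SKIP_PACKAGING", "false") != "true"

assert should_run_packaging_tests({}) is True
assert should_run_packaging_tests({"SKIP_PACKAGING": "true"}) is False
assert should_run_packaging_tests({"SKIP_PACKAGING": "1"}) is True  # only "true" skips
```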
@@ -507,22 +506,13 @@ def main(): |
507 | 506 | else: |
508 | 507 | print("Cannot install SparkR as R was not found in PATH") |
509 | 508 |
510 | | - if os.environ.get("SPARK_JENKINS"): |
511 | | - # if we're on the Amplab Jenkins build servers setup variables |
512 | | - # to reflect the environment settings |
513 | | - build_tool = os.environ.get("SPARK_JENKINS_BUILD_TOOL", "sbt") |
514 | | - scala_version = os.environ.get("SPARK_JENKINS_BUILD_SCALA_PROFILE") |
515 | | - hadoop_version = os.environ.get("SPARK_JENKINS_BUILD_PROFILE", "hadoop3") |
516 | | - test_env = "spark_jenkins" |
| 509 | + build_tool = "sbt" |
| 510 | + scala_version = os.environ.get("SCALA_PROFILE") |
| 511 | + hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop3") |
| 512 | + if "GITHUB_ACTIONS" in os.environ: |
| 513 | + test_env = "github_actions" |
517 | 514 | else: |
518 | | - # else we're running locally or GitHub Actions. |
519 | | - build_tool = "sbt" |
520 | | - scala_version = os.environ.get("SCALA_PROFILE") |
521 | | - hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop3") |
522 | | - if "GITHUB_ACTIONS" in os.environ: |
523 | | - test_env = "github_actions" |
524 | | - else: |
525 | | - test_env = "local" |
| 515 | + test_env = "local" |
526 | 516 |
527 | 517 | extra_profiles = get_hadoop_profiles(hadoop_version) + get_scala_profiles(scala_version) |
528 | 518 |
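With the Jenkins branch removed, the environment setup in main() collapses to a single path. A standalone sketch of the surviving logic, using only the variables visible in this hunk:

```python
import os

def resolve_test_env(env=None):
    # Sketch of the simplified setup in main(): sbt is always the build tool,
    # profiles come straight from SCALA_PROFILE / HADOOP_PROFILE, and the only
    # remaining environment distinction is GitHub Actions vs. local.
    env = os.environ if env is None else env
    build_tool = "sbt"
    scala_version = env.get("SCALA_PROFILE")
    hadoop_version = env.get("HADOOP_PROFILE", "hadoop3")
    test_env = "github_actions" if "GITHUB_ACTIONS" in env else "local"
    return build_tool, scala_version, hadoop_version, test_env

print(resolve_test_env({"GITHUB_ACTIONS": "true"}))
# ('sbt', None, 'hadoop3', 'github_actions')
```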
@@ -570,15 +560,6 @@ def main(): |
570 | 560 | print("[info] There are no modules to test, exiting without testing.") |
571 | 561 | return |
572 | 562 |
573 | | - # If we're running the tests in Jenkins, calculate the diff from the targeted branch, and |
574 | | - # detect modules to test. |
575 | | - elif os.environ.get("SPARK_JENKINS_PRB"): |
576 | | - target_branch = os.environ["ghprbTargetBranch"] |
577 | | - changed_files = identify_changed_files_from_git_commits("HEAD", target_branch=target_branch) |
578 | | - changed_modules = determine_modules_for_files(changed_files) |
579 | | - test_modules = determine_modules_to_test(changed_modules) |
580 | | - excluded_tags = determine_tags_to_exclude(changed_modules) |
581 | | - |
582 | 563 | # If there is no changed module found, tests all. |
583 | 564 | if not changed_modules: |
584 | 565 | changed_modules = [modules.root] |
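Dropping the SPARK_JENKINS_PRB branch means change-based module selection is no longer computed here; the fallback that follows it still guarantees a full run when nothing was detected. A sketch of that fallback (a string stands in for the real sparktestsupport.modules.root object):

```python
def select_modules(changed_modules, root_module):
    # Sketch of the fallback kept after the removed block: an empty change
    # detection result still means "test everything" (modules.root).
    return changed_modules if changed_modules else [root_module]

assert select_modules([], "root") == ["root"]
assert select_modules(["pyspark-core"], "root") == ["pyspark-core"]
```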
@@ -633,12 +614,7 @@ def main(): |
633 | 614 | ): |
634 | 615 | run_sparkr_style_checks() |
635 | 616 |
636 | | - # determine if docs were changed and if we're inside the jenkins environment |
637 | | - # note - the below commented out until *all* Jenkins workers can get the Bundler gem installed |
638 | | - # if "DOCS" in changed_modules and test_env == "spark_jenkins": |
639 | | - # build_spark_documentation() |
640 | | - |
641 | | - if any(m.should_run_build_tests for m in test_modules) and test_env != "spark_jenkins": |
| 617 | + if any(m.should_run_build_tests for m in test_modules): |
642 | 618 | run_build_tests() |
643 | 619 |
644 | 620 | # spark build |
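Finally, the build tests now run for any selected module that opts in, with no test_env exception. A self-contained sketch of that filter (FakeModule is a stand-in for the module objects in sparktestsupport.modules):

```python
from collections import namedtuple

# Stand-in for the module objects defined in sparktestsupport.modules.
FakeModule = namedtuple("FakeModule", ["name", "should_run_build_tests"])

def needs_build_tests(test_modules):
    # Mirrors the new, unconditional check: with the spark_jenkins test_env
    # guard removed, build tests run whenever any selected module opts in.
    return any(m.should_run_build_tests for m in test_modules)

assert needs_build_tests([FakeModule("core", True), FakeModule("docs", False)])
assert not needs_build_tests([FakeModule("docs", False)])
```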